Dataset columns and observed value lengths:

| Column | Type | Min length | Max length |
|---|---|---|---|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | sequence | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | sequence | 0 | 25 |
| languages | sequence | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | sequence | 0 | 352 |
| processed_texts | sequence | 1 | 353 |
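The rows that follow can be read with the `datasets` library. A minimal sketch, assuming the dump is hosted on the Hugging Face Hub; the repository id below is a placeholder, since this excerpt does not name the hosting repo.

```python
from datasets import load_dataset

# Placeholder repo id: substitute the actual Hub path of this dump.
ds = load_dataset("your-org/dataset-cards-dump", split="train")

row = ds[0]
print(row["sha"])            # 40-character revision hash
print(row["id"])             # repository id, e.g. "MU-NLPC/Calc-ape210k_selftrain_experiment"
print(row["tags"])           # list of tag strings
print(row["created_at"], row["last_modified"])  # 25-character ISO-8601 timestamps
print(row["text"][:200])     # raw dataset-card markdown
```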
fe3cc8297aa0e3d8923d9de0766fbcff51626898
# Dataset Card for "Calc-ape210k_selftrain_experiment" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
MU-NLPC/Calc-ape210k_selftrain_experiment
[ "region:us" ]
2024-01-22T17:21:24+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_chinese", "dtype": "string"}, {"name": "chain", "dtype": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "equation", "dtype": "string"}, {"name": "model_checkpoint", "dtype": "string"}, {"name": "correct_1", "dtype": "string"}, {"name": "correct_2", "dtype": "string"}, {"name": "incorrect_1", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 34869989, "num_examples": 24097}], "download_size": 14618895, "dataset_size": 34869989}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-22T22:47:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Calc-ape210k_selftrain_experiment" More Information needed
[ "# Dataset Card for \"Calc-ape210k_selftrain_experiment\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Calc-ape210k_selftrain_experiment\"\n\nMore Information needed" ]
99c89df895b996e218e97e32b28ce37a2609f441
# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abhishekchohan/mistral-7B-forest-merge-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T17:22:14.145358](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1/blob/main/results_2024-01-22T17-22-14.145358.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6025264269069641, "acc_stderr": 0.032921649449251675, "acc_norm": 0.6050419928736916, "acc_norm_stderr": 0.033582448395703776, "mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853473, "mc2": 0.5852690107055646, "mc2_stderr": 0.01561479793889522 }, "harness|arc:challenge|25": { "acc": 0.6040955631399317, "acc_stderr": 0.014291228393536588, "acc_norm": 0.6279863481228669, "acc_norm_stderr": 0.014124597881844461 }, "harness|hellaswag|10": { "acc": 0.6521609241187014, "acc_stderr": 0.0047531124327286995, "acc_norm": 0.8431587333200558, "acc_norm_stderr": 0.0036290784658809796 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6513157894736842, "acc_stderr": 0.0387813988879761, "acc_norm": 0.6513157894736842, "acc_norm_stderr": 0.0387813988879761 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6736111111111112, "acc_stderr": 0.03921067198982266, "acc_norm": 0.6736111111111112, "acc_norm_stderr": 0.03921067198982266 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 
0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6069364161849711, "acc_stderr": 0.0372424959581773, "acc_norm": 0.6069364161849711, "acc_norm_stderr": 0.0372424959581773 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5319148936170213, "acc_stderr": 0.03261936918467382, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.03261936918467382 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.38596491228070173, "acc_stderr": 0.04579639422070434, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.04579639422070434 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4126984126984127, "acc_stderr": 0.02535574126305527, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.02535574126305527 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04426266681379909, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7193548387096774, "acc_stderr": 0.0255606047210229, "acc_norm": 0.7193548387096774, "acc_norm_stderr": 0.0255606047210229 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406795, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406795 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7393939393939394, "acc_stderr": 0.034277431758165236, "acc_norm": 0.7393939393939394, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.02811209121011747, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.02811209121011747 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.558974358974359, "acc_stderr": 0.02517404838400074, "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.02517404838400074 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.02794045713622841, "acc_norm": 0.3, "acc_norm_stderr": 0.02794045713622841 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5798319327731093, "acc_stderr": 0.03206183783236152, "acc_norm": 0.5798319327731093, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7798165137614679, "acc_stderr": 0.017765978652327537, "acc_norm": 0.7798165137614679, "acc_norm_stderr": 0.017765978652327537 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.803921568627451, "acc_stderr": 0.027865942286639325, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.027865942286639325 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.0263616516683891, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.0263616516683891 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.031493846709941306, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.031493846709941306 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.03880848301082394, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.03880848301082394 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.03680350371286461, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.03680350371286461 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.04058042015646034, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.04058042015646034 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.02280138253459754, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.02280138253459754 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7854406130268199, "acc_stderr": 0.014680033956893346, "acc_norm": 0.7854406130268199, "acc_norm_stderr": 0.014680033956893346 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6560693641618497, "acc_stderr": 0.025574123786546665, "acc_norm": 0.6560693641618497, "acc_norm_stderr": 0.025574123786546665 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3653631284916201, "acc_stderr": 0.016104833880142295, "acc_norm": 0.3653631284916201, "acc_norm_stderr": 0.016104833880142295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6405228758169934, "acc_stderr": 0.027475969910660952, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.027475969910660952 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6851851851851852, "acc_stderr": 0.025842248700902168, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.025842248700902168 
}, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4326241134751773, "acc_stderr": 0.02955545423677885, "acc_norm": 0.4326241134751773, "acc_norm_stderr": 0.02955545423677885 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43741851368970014, "acc_stderr": 0.012669813464935726, "acc_norm": 0.43741851368970014, "acc_norm_stderr": 0.012669813464935726 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6176470588235294, "acc_stderr": 0.02952009569768776, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.02952009569768776 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6143790849673203, "acc_stderr": 0.019691459052354025, "acc_norm": 0.6143790849673203, "acc_norm_stderr": 0.019691459052354025 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.673469387755102, "acc_stderr": 0.03002105623844031, "acc_norm": 0.673469387755102, "acc_norm_stderr": 0.03002105623844031 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.030147775935409217, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.030147775935409217 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.03891364495835821, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.03891364495835821 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368036, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368036 }, "harness|truthfulqa:mc|0": { "mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853473, "mc2": 0.5852690107055646, "mc2_stderr": 0.01561479793889522 }, "harness|winogrande|5": { "acc": 0.7719021310181531, "acc_stderr": 0.011793015817663597 }, "harness|gsm8k|5": { "acc": 0.49962092494313876, "acc_stderr": 0.013772480761626167 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
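Beyond the winogrande example shown in the card above, the per-task details and the aggregated results can be loaded the same way. A hedged sketch using configuration and split names listed in this repository's metadata (the per-task configs, the "latest" split, and the "results" configuration); it assumes those configurations resolve as declared.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1"

# Per-task details: "latest" always points at the most recent run,
# the timestamped split (2024_01_22T17_22_14.145358) at that specific run.
algebra = load_dataset(repo, "harness_hendrycksTest_abstract_algebra_5", split="latest")
print(algebra[0])

# Aggregated metrics for the run are stored in the "results" configuration.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```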
open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1
[ "region:us" ]
2024-01-22T17:24:34+00:00
{"pretty_name": "Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [abhishekchohan/mistral-7B-forest-merge-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T17:22:14.145358](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1/blob/main/results_2024-01-22T17-22-14.145358.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6025264269069641,\n \"acc_stderr\": 0.032921649449251675,\n \"acc_norm\": 0.6050419928736916,\n \"acc_norm_stderr\": 0.033582448395703776,\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.01729742144853473,\n \"mc2\": 0.5852690107055646,\n \"mc2_stderr\": 0.01561479793889522\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6040955631399317,\n \"acc_stderr\": 0.014291228393536588,\n \"acc_norm\": 0.6279863481228669,\n \"acc_norm_stderr\": 0.014124597881844461\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6521609241187014,\n \"acc_stderr\": 0.0047531124327286995,\n \"acc_norm\": 0.8431587333200558,\n \"acc_norm_stderr\": 0.0036290784658809796\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.0387813988879761,\n \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.0387813988879761\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6736111111111112,\n \"acc_stderr\": 0.03921067198982266,\n \"acc_norm\": 0.6736111111111112,\n \"acc_norm_stderr\": 0.03921067198982266\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n \"acc_stderr\": 0.0372424959581773,\n \"acc_norm\": 0.6069364161849711,\n \"acc_norm_stderr\": 0.0372424959581773\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467382,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467382\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.38596491228070173,\n \"acc_stderr\": 0.04579639422070434,\n \"acc_norm\": 0.38596491228070173,\n \"acc_norm_stderr\": 0.04579639422070434\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.02535574126305527,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.02535574126305527\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7193548387096774,\n \"acc_stderr\": 0.0255606047210229,\n \"acc_norm\": 0.7193548387096774,\n \"acc_norm_stderr\": 0.0255606047210229\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406795,\n \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406795\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011747,\n \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 
0.02811209121011747\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.558974358974359,\n \"acc_stderr\": 0.02517404838400074,\n \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.02517404838400074\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.02794045713622841,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.02794045713622841\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5798319327731093,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.5798319327731093,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7798165137614679,\n \"acc_stderr\": 0.017765978652327537,\n \"acc_norm\": 0.7798165137614679,\n \"acc_norm_stderr\": 0.017765978652327537\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639325,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639325\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.031493846709941306,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.031493846709941306\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.03880848301082394,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.03880848301082394\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.04058042015646034,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.04058042015646034\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.02280138253459754,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.02280138253459754\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7854406130268199,\n \"acc_stderr\": 0.014680033956893346,\n \"acc_norm\": 0.7854406130268199,\n \"acc_norm_stderr\": 0.014680033956893346\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6560693641618497,\n \"acc_stderr\": 0.025574123786546665,\n \"acc_norm\": 0.6560693641618497,\n \"acc_norm_stderr\": 0.025574123786546665\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3653631284916201,\n \"acc_stderr\": 0.016104833880142295,\n \"acc_norm\": 0.3653631284916201,\n \"acc_norm_stderr\": 0.016104833880142295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.025842248700902168,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.025842248700902168\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4326241134751773,\n \"acc_stderr\": 0.02955545423677885,\n \"acc_norm\": 0.4326241134751773,\n \"acc_norm_stderr\": 0.02955545423677885\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43741851368970014,\n \"acc_stderr\": 0.012669813464935726,\n \"acc_norm\": 0.43741851368970014,\n \"acc_norm_stderr\": 0.012669813464935726\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.02952009569768776,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.02952009569768776\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6143790849673203,\n \"acc_stderr\": 0.019691459052354025,\n \"acc_norm\": 0.6143790849673203,\n \"acc_norm_stderr\": 0.019691459052354025\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.673469387755102,\n \"acc_stderr\": 0.03002105623844031,\n \"acc_norm\": 0.673469387755102,\n \"acc_norm_stderr\": 0.03002105623844031\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.7611940298507462,\n \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.03891364495835821,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.03891364495835821\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368036,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368036\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.01729742144853473,\n \"mc2\": 0.5852690107055646,\n \"mc2_stderr\": 0.01561479793889522\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663597\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.49962092494313876,\n \"acc_stderr\": 0.013772480761626167\n 
}\n}\n```", "repo_url": "https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T17_22_14.145358", "path": ["**/details_harness|winogrande|5_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T17-22-14.145358.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_22T17_22_14.145358", "path": ["results_2024-01-22T17-22-14.145358.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T17-22-14.145358.parquet"]}]}]}
2024-01-22T17:24:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1 Dataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-merge-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T17:22:14.145358(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-merge-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:22:14.145358(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-merge-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:22:14.145358(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
907decd529501dd524c2b8ee3374bfd6f2ae5d24
**French-Public Domain-Book** or **French-PD-Books** is a large collection aiming to aggregate all the French monographs in the public domain. The collection was originally compiled by Pierre-Carl Langlais, on the basis of a large corpus curated by Benoît de Courson and Benjamin Azoulay for [Gallicagram](https://shiny.ens-paris-saclay.fr/app/gallicagram) and in cooperation with OpenLLMFrance. Gallicagram is a leading cultural analytics project giving access to word and ngram search on very large cultural heritage datasets in French and other languages. ## Content As of January 2024, the collection contains 289,000 books (16,407,292,362 words) from the French National Library (Gallica). Each parquet file has the full text of 2,000 books selected at random and a few core metadata fields (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API (a short loading sketch is included after this card). This initial aggregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14). The composition of the dataset adheres to the criteria for public domain works in the EU and, consequently, all Berne Convention countries for EU authors: any publication whose author has been dead for more than 70 years. ## Uses The primary use of the collection is for cultural analytics projects on a wide scale. It is already in use by the Gallicagram project, an open and significantly enhanced version of the ngram viewer. The collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes. ## License The entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholder have expired. The French National Library claims additional rights in its terms of use and restricts commercial use: "La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus." (In English: commercial reuse of these contents is subject to a fee and requires a licence; commercial reuse means reselling the contents as processed products or services, or any other reuse of the contents that directly generates revenue.) There has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive states that "Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation." (art. 14) ## Future developments This dataset is not a one-time work but will continue to evolve significantly in three directions: * Correction of computer-generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be of lower quality. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction. * Enhancement of the structure/editorial presentation of the original text.
Some parts of the original documents are likely unwanted for large-scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be expected from applying new SOTA layout recognition models (like COLAF) to the original PDF files. * Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books.
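A short loading sketch: since the collection ships as plain parquet files with a small set of metadata columns, streaming a few records is often enough to get a feel for the data before downloading the full corpus. The snippet below is an illustration only; it assumes the dataset is published under the identifier PleIAs/French-PD-Books (recorded below) with a default train split, and it avoids hard-coding column names beyond those the card lists.

```python
# Minimal sketch: stream a few records from the collection without downloading
# every parquet file. Assumes the dataset id "PleIAs/French-PD-Books" and a
# default "train" split; adjust if the repository is organised differently.
from datasets import load_dataset

ds = load_dataset("PleIAs/French-PD-Books", split="train", streaming=True)

for i, record in enumerate(ds):
    # The card lists Gallica id, title, author, word counts and the full text as
    # core fields; print the actual keys instead of assuming exact column names.
    print(sorted(record.keys()))
    if i >= 2:
        break
```

From there, the Gallica identifiers can be passed to the BNF APIs to expand the metadata, as suggested above.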
PleIAs/French-PD-Books
[ "task_categories:text-generation", "language:fr", "ocr", "region:us" ]
2024-01-22T17:34:00+00:00
{"language": ["fr"], "task_categories": ["text-generation"], "pretty_name": "French-Public Domain-Book", "tags": ["ocr"]}
2024-02-03T23:52:45+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #ocr #region-us
French-Public Domain-Book or French-PD-Books is a large collection aiming to aggregate all the French monographs in the public domain. The collection was originally compiled by Pierre-Carl Langlais, on the basis of a large corpus curated by Benoît de Courson and Benjamin Azoulay for Gallicagram and in cooperation with OpenLLMFrance. Gallicagram is a leading cultural analytics project giving access to word and ngram search on very large cultural heritage datasets in French and other languages. ## Content As of January 2024, the collection contains 289,000 books (16,407,292,362 words) from the French National Library (Gallica). Each parquet file has the full text of 2,000 books selected at random and a few core metadata fields (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API. This initial aggregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14). The composition of the dataset adheres to the criteria for public domain works in the EU and, consequently, all Berne Convention countries for EU authors: any publication whose author has been dead for more than 70 years. ## Uses The primary use of the collection is for cultural analytics projects on a wide scale. It is already in use by the Gallicagram project, an open and significantly enhanced version of the ngram viewer. The collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes. ## License The entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholder have expired. The French National Library claims additional rights in its terms of use and restricts commercial use: "La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus." (In English: commercial reuse of these contents is subject to a fee and requires a licence; commercial reuse means reselling the contents as processed products or services, or any other reuse of the contents that directly generates revenue.) There has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive states that "Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation." (art. 14) ## Future developments This dataset is not a one-time work but will continue to evolve significantly in three directions: * Correction of computer-generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be of lower quality. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction. * Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large-scale analysis or model training (header, page count…).
Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be expected from applying new SOTA layout recognition models (like COLAF) to the original PDF files. * Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books.
[ "## Content\nAs of January 2024, the collection contains 289,000 books (16,407,292,362 words) from the French National Library (Gallica). Each parquet file has the full text of 2,000 books selected at random and few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API.\n\nThis initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14)\n\nThe composition of the dataset adheres to the criteria for public domain works in the EU and, consequently, all Berne-countries for EU authors: any publication whose author is dead for more than 70 years.", "## Uses\nThe primary use of the collection is for cultural analytics project on a wide scale. It is already in use by the Gallicagram project, an open and significantly enhanced version of ngram viewer.\n\nThe collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes.", "## License\nThe entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholders have expired.\n\nThe French National Library claims additional rights in its terms of use and restricts commercial use: \"La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus.\"\n\nThere has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive states that \"Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation.\" (art. 14)", "## Future developments\nThis dataset is not a one time work but will continue to evolve significantly on three directions:\n* Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction.\n* Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files.\n* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books." ]
[ "TAGS\n#task_categories-text-generation #language-French #ocr #region-us \n", "## Content\nAs of January 2024, the collection contains 289,000 books (16,407,292,362 words) from the French National Library (Gallica). Each parquet file has the full text of 2,000 books selected at random and few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API.\n\nThis initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14)\n\nThe composition of the dataset adheres to the criteria for public domain works in the EU and, consequently, all Berne-countries for EU authors: any publication whose author is dead for more than 70 years.", "## Uses\nThe primary use of the collection is for cultural analytics project on a wide scale. It is already in use by the Gallicagram project, an open and significantly enhanced version of ngram viewer.\n\nThe collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes.", "## License\nThe entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholders have expired.\n\nThe French National Library claims additional rights in its terms of use and restricts commercial use: \"La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus.\"\n\nThere has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive states that \"Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation.\" (art. 14)", "## Future developments\nThis dataset is not a one time work but will continue to evolve significantly on three directions:\n* Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction.\n* Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files.\n* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books." ]
28ce943ce6037073831c285860c52ed37f916346
# Dataset Card for Evaluation run of silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3](https://huggingface.co/silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T17:40:11.975482](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3/blob/main/results_2024-01-22T17-40-11.975482.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6023570573610609, "acc_stderr": 0.03329909478004723, "acc_norm": 0.6073390560678269, "acc_norm_stderr": 0.03397721718797508, "mc1": 0.5042839657282742, "mc1_stderr": 0.01750285857737126, "mc2": 0.6626283921742918, "mc2_stderr": 0.015319646142052617 }, "harness|arc:challenge|25": { "acc": 0.5452218430034129, "acc_stderr": 0.014551507060836357, "acc_norm": 0.5887372013651877, "acc_norm_stderr": 0.014379441068522089 }, "harness|hellaswag|10": { "acc": 0.6437960565624378, "acc_stderr": 0.004778978031389639, "acc_norm": 0.8355905198167696, "acc_norm_stderr": 0.0036988923883801003 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04292596718256981, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6805555555555556, "acc_stderr": 0.038990736873573344, "acc_norm": 0.6805555555555556, "acc_norm_stderr": 0.038990736873573344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 },
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.0373362665538351, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.0373362665538351 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.47058823529411764, "acc_stderr": 0.04966570903978529, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.04966570903978529 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5148936170212766, "acc_stderr": 0.03267151848924777, "acc_norm": 0.5148936170212766, "acc_norm_stderr": 0.03267151848924777 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851102, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851102 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7064516129032258, "acc_stderr": 0.025906087021319295, "acc_norm": 0.7064516129032258, "acc_norm_stderr": 0.025906087021319295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7272727272727273, "acc_stderr": 0.0347769116216366, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.02578772318072387, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.02578772318072387 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5743589743589743, "acc_stderr": 0.025069094387296532, "acc_norm": 0.5743589743589743, "acc_norm_stderr": 0.025069094387296532 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.592436974789916, "acc_stderr": 0.03191863374478466, "acc_norm": 
0.592436974789916, "acc_norm_stderr": 0.03191863374478466 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7889908256880734, "acc_stderr": 0.01749392240411265, "acc_norm": 0.7889908256880734, "acc_norm_stderr": 0.01749392240411265 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653064, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653064 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.75, "acc_stderr": 0.03039153369274154, "acc_norm": 0.75, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.02765215314415926, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.02765215314415926 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289201, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467766, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467766 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.04373313040914761, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.04373313040914761 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.034878251684978906, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.034878251684978906 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010213, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010213 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077788, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077788 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7701149425287356, "acc_stderr": 0.01504630184669182, "acc_norm": 0.7701149425287356, "acc_norm_stderr": 0.01504630184669182 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153183, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153183 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.34413407821229053, "acc_stderr": 0.015889221313307094, "acc_norm": 0.34413407821229053, "acc_norm_stderr": 0.015889221313307094 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6699346405228758, "acc_stderr": 0.0269256546536157, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.0269256546536157 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6820987654320988, "acc_stderr": 
0.02591006352824087, "acc_norm": 0.6820987654320988, "acc_norm_stderr": 0.02591006352824087 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.45390070921985815, "acc_stderr": 0.02970045324729146, "acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.02970045324729146 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.42503259452411996, "acc_stderr": 0.012625879884892, "acc_norm": 0.42503259452411996, "acc_norm_stderr": 0.012625879884892 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5808823529411765, "acc_stderr": 0.02997280717046462, "acc_norm": 0.5808823529411765, "acc_norm_stderr": 0.02997280717046462 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6062091503267973, "acc_stderr": 0.019766211991073056, "acc_norm": 0.6062091503267973, "acc_norm_stderr": 0.019766211991073056 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128448, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128448 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8059701492537313, "acc_stderr": 0.027962677604768907, "acc_norm": 0.8059701492537313, "acc_norm_stderr": 0.027962677604768907 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333045, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5042839657282742, "mc1_stderr": 0.01750285857737126, "mc2": 0.6626283921742918, "mc2_stderr": 0.015319646142052617 }, "harness|winogrande|5": { "acc": 0.7632202052091555, "acc_stderr": 0.011947592365207406 }, "harness|gsm8k|5": { "acc": 0.38286580742987114, "acc_stderr": 0.013389223491820465 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
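As a complement to the per-task loading example above, the aggregated metrics can be read from the "results" configuration mentioned in the summary. The snippet below is a sketch under stated assumptions: it relies on the "latest" split naming visible in this dataset's file configuration (each run also has a timestamped split) rather than on a documented interface.

```python
# Sketch (assumption-based): load the aggregated "results" configuration of the
# details dataset described above and inspect the most recent run. The "latest"
# split name mirrors the per-task configurations listed in the card metadata.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3",
    "results",
    split="latest",
)

# Each row stores the aggregated metrics of one evaluation run.
print(results[0])
```

Reading the aggregated configuration avoids downloading the much larger per-sample detail files when only the headline scores are needed.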
open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3
[ "region:us" ]
2024-01-22T17:42:33+00:00
{"pretty_name": "Evaluation run of silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "dataset_summary": "Dataset automatically created during the evaluation run of model [silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3](https://huggingface.co/silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T17:40:11.975482](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder45__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3/blob/main/results_2024-01-22T17-40-11.975482.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6023570573610609,\n \"acc_stderr\": 0.03329909478004723,\n \"acc_norm\": 0.6073390560678269,\n \"acc_norm_stderr\": 0.03397721718797508,\n \"mc1\": 0.5042839657282742,\n \"mc1_stderr\": 0.01750285857737126,\n \"mc2\": 0.6626283921742918,\n \"mc2_stderr\": 0.015319646142052617\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5452218430034129,\n \"acc_stderr\": 0.014551507060836357,\n \"acc_norm\": 0.5887372013651877,\n \"acc_norm_stderr\": 0.014379441068522089\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6437960565624378,\n \"acc_stderr\": 0.004778978031389639,\n \"acc_norm\": 0.8355905198167696,\n \"acc_norm_stderr\": 0.0036988923883801003\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6805555555555556,\n \"acc_stderr\": 0.038990736873573344,\n \"acc_norm\": 
0.6805555555555556,\n \"acc_norm_stderr\": 0.038990736873573344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.0373362665538351,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.0373362665538351\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.04966570903978529,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.04966570903978529\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.03267151848924777,\n \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.03267151848924777\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.025138091388851102,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.025138091388851102\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7064516129032258,\n \"acc_stderr\": 0.025906087021319295,\n \"acc_norm\": 0.7064516129032258,\n \"acc_norm_stderr\": 0.025906087021319295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.0347769116216366,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.0347769116216366\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 
0.02578772318072387,\n \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072387\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5743589743589743,\n \"acc_stderr\": 0.025069094387296532,\n \"acc_norm\": 0.5743589743589743,\n \"acc_norm_stderr\": 0.025069094387296532\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.592436974789916,\n \"acc_stderr\": 0.03191863374478466,\n \"acc_norm\": 0.592436974789916,\n \"acc_norm_stderr\": 0.03191863374478466\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7889908256880734,\n \"acc_stderr\": 0.01749392240411265,\n \"acc_norm\": 0.7889908256880734,\n \"acc_norm_stderr\": 0.01749392240411265\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653064,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653064\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415926,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415926\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467766,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467766\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.04373313040914761,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.04373313040914761\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.034878251684978906,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.034878251684978906\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077788,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077788\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7701149425287356,\n \"acc_stderr\": 0.01504630184669182,\n \"acc_norm\": 0.7701149425287356,\n \"acc_norm_stderr\": 0.01504630184669182\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153183,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153183\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.34413407821229053,\n \"acc_stderr\": 0.015889221313307094,\n \"acc_norm\": 0.34413407821229053,\n \"acc_norm_stderr\": 0.015889221313307094\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.0269256546536157,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.0269256546536157\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6820987654320988,\n \"acc_stderr\": 0.02591006352824087,\n \"acc_norm\": 0.6820987654320988,\n \"acc_norm_stderr\": 0.02591006352824087\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.02970045324729146,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.02970045324729146\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42503259452411996,\n \"acc_stderr\": 0.012625879884892,\n \"acc_norm\": 0.42503259452411996,\n \"acc_norm_stderr\": 0.012625879884892\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5808823529411765,\n \"acc_stderr\": 0.02997280717046462,\n \"acc_norm\": 0.5808823529411765,\n \"acc_norm_stderr\": 0.02997280717046462\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6062091503267973,\n \"acc_stderr\": 0.019766211991073056,\n \"acc_norm\": 0.6062091503267973,\n \"acc_norm_stderr\": 0.019766211991073056\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128448,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128448\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8059701492537313,\n \"acc_stderr\": 0.027962677604768907,\n \"acc_norm\": 0.8059701492537313,\n \"acc_norm_stderr\": 0.027962677604768907\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5042839657282742,\n \"mc1_stderr\": 0.01750285857737126,\n \"mc2\": 0.6626283921742918,\n \"mc2_stderr\": 0.015319646142052617\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7632202052091555,\n \"acc_stderr\": 0.011947592365207406\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.38286580742987114,\n \"acc_stderr\": 0.013389223491820465\n }\n}\n```", "repo_url": "https://huggingface.co/silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-40-11.975482.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-40-11.975482.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-40-11.975482.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-40-11.975482.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-40-11.975482.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["**/details_harness|winogrande|5_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-22T17-40-11.975482.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T17_40_11.975482", "path": ["results_2024-01-22T17-40-11.975482.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T17-40-11.975482.parquet"]}]}]}
2024-01-22T17:42:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 Dataset automatically created during the evaluation run of model silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T17:40:11.975482 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\n\n\n\nDataset automatically created during the evaluation run of model silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:40:11.975482(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\n\n\n\nDataset automatically created during the evaluation run of model silvercoder45/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:40:11.975482(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
b534ec8d0024cefc1ccf1ca8f1acbced98b82141
# Dataset Card for Evaluation run of robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2](https://huggingface.co/robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_robinsmits__Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T17:51:45.656296](https://huggingface.co/datasets/open-llm-leaderboard/details_robinsmits__Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2/blob/main/results_2024-01-22T17-51-45.656296.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5919635778509046, "acc_stderr": 0.03367247342299486, "acc_norm": 0.596476293969353, "acc_norm_stderr": 0.03436978735851277, "mc1": 0.4773561811505508, "mc1_stderr": 0.017485542258489646, "mc2": 0.6407684674777628, "mc2_stderr": 0.015297982301051796 }, "harness|arc:challenge|25": { "acc": 0.5716723549488054, "acc_stderr": 0.014460496367599017, "acc_norm": 0.6186006825938567, "acc_norm_stderr": 0.014194389086685247 }, "harness|hellaswag|10": { "acc": 0.6471818362875921, "acc_stderr": 0.004768701562988875, "acc_norm": 0.8370842461661023, "acc_norm_stderr": 0.003685340687255413 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5855263157894737, "acc_stderr": 0.04008973785779206, "acc_norm": 0.5855263157894737, "acc_norm_stderr": 0.04008973785779206 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6339622641509434, "acc_stderr": 0.02964781353936525, "acc_norm": 0.6339622641509434, "acc_norm_stderr": 0.02964781353936525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5664739884393064, "acc_stderr": 0.037786210790920566, "acc_norm": 0.5664739884393064, "acc_norm_stderr": 0.037786210790920566 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4803921568627451, "acc_stderr": 0.04971358884367405, "acc_norm": 0.4803921568627451, "acc_norm_stderr": 0.04971358884367405 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5063829787234042, "acc_stderr": 0.03268335899936337, "acc_norm": 0.5063829787234042, "acc_norm_stderr": 0.03268335899936337 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3835978835978836, "acc_stderr": 0.025043757318520196, "acc_norm": 0.3835978835978836, "acc_norm_stderr": 0.025043757318520196 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6129032258064516, "acc_stderr": 0.027709359675032488, "acc_norm": 0.6129032258064516, "acc_norm_stderr": 0.027709359675032488 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.0351760354036101, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.0351760354036101 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6909090909090909, "acc_stderr": 0.036085410115739666, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7323232323232324, "acc_stderr": 0.03154449888270285, "acc_norm": 0.7323232323232324, "acc_norm_stderr": 0.03154449888270285 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153303, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153303 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5487179487179488, "acc_stderr": 0.025230381238934837, "acc_norm": 0.5487179487179488, "acc_norm_stderr": 0.025230381238934837 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465066, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465066 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6092436974789915, "acc_stderr": 0.03169380235712997, "acc_norm": 
0.6092436974789915, "acc_norm_stderr": 0.03169380235712997 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7761467889908257, "acc_stderr": 0.01787121776779024, "acc_norm": 0.7761467889908257, "acc_norm_stderr": 0.01787121776779024 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.03407632093854052, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.03407632093854052 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591361, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6053811659192825, "acc_stderr": 0.03280400504755291, "acc_norm": 0.6053811659192825, "acc_norm_stderr": 0.03280400504755291 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.039153454088478354, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.039153454088478354 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.040261875275912073, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.040261875275912073 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.03487825168497892, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7184466019417476, "acc_stderr": 0.04453254836326466, "acc_norm": 0.7184466019417476, "acc_norm_stderr": 0.04453254836326466 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489294, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489294 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7598978288633461, "acc_stderr": 0.015274685213734198, "acc_norm": 0.7598978288633461, "acc_norm_stderr": 0.015274685213734198 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.653179190751445, "acc_stderr": 0.025624723994030454, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.025624723994030454 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.35195530726256985, "acc_stderr": 0.01597266852368907, "acc_norm": 0.35195530726256985, "acc_norm_stderr": 0.01597266852368907 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6535947712418301, "acc_stderr": 0.02724561304721535, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.02724561304721535 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6655948553054662, "acc_stderr": 0.026795422327893937, "acc_norm": 0.6655948553054662, "acc_norm_stderr": 0.026795422327893937 }, "harness|hendrycksTest-prehistory|5": { "acc": 
0.6604938271604939, "acc_stderr": 0.026348564412011624, "acc_norm": 0.6604938271604939, "acc_norm_stderr": 0.026348564412011624 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4326241134751773, "acc_stderr": 0.029555454236778852, "acc_norm": 0.4326241134751773, "acc_norm_stderr": 0.029555454236778852 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41851368970013036, "acc_stderr": 0.012599505608336461, "acc_norm": 0.41851368970013036, "acc_norm_stderr": 0.012599505608336461 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5992647058823529, "acc_stderr": 0.02976826352893311, "acc_norm": 0.5992647058823529, "acc_norm_stderr": 0.02976826352893311 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6062091503267973, "acc_stderr": 0.019766211991073063, "acc_norm": 0.6062091503267973, "acc_norm_stderr": 0.019766211991073063 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368043, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368043 }, "harness|truthfulqa:mc|0": { "mc1": 0.4773561811505508, "mc1_stderr": 0.017485542258489646, "mc2": 0.6407684674777628, "mc2_stderr": 0.015297982301051796 }, "harness|winogrande|5": { "acc": 0.7845303867403315, "acc_stderr": 0.011555295286059282 }, "harness|gsm8k|5": { "acc": 0.36997725549658833, "acc_stderr": 0.013298661207727129 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_robinsmits__Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2
[ "region:us" ]
2024-01-22T17:54:00+00:00
{"pretty_name": "Evaluation run of robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2", "dataset_summary": "Dataset automatically created during the evaluation run of model [robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2](https://huggingface.co/robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_robinsmits__Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T17:51:45.656296](https://huggingface.co/datasets/open-llm-leaderboard/details_robinsmits__Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2/blob/main/results_2024-01-22T17-51-45.656296.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5919635778509046,\n \"acc_stderr\": 0.03367247342299486,\n \"acc_norm\": 0.596476293969353,\n \"acc_norm_stderr\": 0.03436978735851277,\n \"mc1\": 0.4773561811505508,\n \"mc1_stderr\": 0.017485542258489646,\n \"mc2\": 0.6407684674777628,\n \"mc2_stderr\": 0.015297982301051796\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5716723549488054,\n \"acc_stderr\": 0.014460496367599017,\n \"acc_norm\": 0.6186006825938567,\n \"acc_norm_stderr\": 0.014194389086685247\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6471818362875921,\n \"acc_stderr\": 0.004768701562988875,\n \"acc_norm\": 0.8370842461661023,\n \"acc_norm_stderr\": 0.003685340687255413\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5855263157894737,\n \"acc_stderr\": 0.04008973785779206,\n \"acc_norm\": 0.5855263157894737,\n \"acc_norm_stderr\": 0.04008973785779206\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6339622641509434,\n \"acc_stderr\": 0.02964781353936525,\n \"acc_norm\": 0.6339622641509434,\n \"acc_norm_stderr\": 0.02964781353936525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n \"acc_norm_stderr\": 
0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5664739884393064,\n \"acc_stderr\": 0.037786210790920566,\n \"acc_norm\": 0.5664739884393064,\n \"acc_norm_stderr\": 0.037786210790920566\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4803921568627451,\n \"acc_stderr\": 0.04971358884367405,\n \"acc_norm\": 0.4803921568627451,\n \"acc_norm_stderr\": 0.04971358884367405\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5063829787234042,\n \"acc_stderr\": 0.03268335899936337,\n \"acc_norm\": 0.5063829787234042,\n \"acc_norm_stderr\": 0.03268335899936337\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3835978835978836,\n \"acc_stderr\": 0.025043757318520196,\n \"acc_norm\": 0.3835978835978836,\n \"acc_norm_stderr\": 0.025043757318520196\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768177,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768177\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6129032258064516,\n \"acc_stderr\": 0.027709359675032488,\n \"acc_norm\": 0.6129032258064516,\n \"acc_norm_stderr\": 0.027709359675032488\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.0351760354036101,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.0351760354036101\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.036085410115739666,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.036085410115739666\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7323232323232324,\n \"acc_stderr\": 0.03154449888270285,\n \"acc_norm\": 0.7323232323232324,\n \"acc_norm_stderr\": 0.03154449888270285\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153303,\n \"acc_norm\": 
0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153303\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5487179487179488,\n \"acc_stderr\": 0.025230381238934837,\n \"acc_norm\": 0.5487179487179488,\n \"acc_norm_stderr\": 0.025230381238934837\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465066,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465066\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6092436974789915,\n \"acc_stderr\": 0.03169380235712997,\n \"acc_norm\": 0.6092436974789915,\n \"acc_norm_stderr\": 0.03169380235712997\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7761467889908257,\n \"acc_stderr\": 0.01787121776779024,\n \"acc_norm\": 0.7761467889908257,\n \"acc_norm_stderr\": 0.01787121776779024\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.03407632093854052,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.03407632093854052\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591361,\n \"acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591361\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6053811659192825,\n \"acc_stderr\": 0.03280400504755291,\n \"acc_norm\": 0.6053811659192825,\n \"acc_norm_stderr\": 0.03280400504755291\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.039153454088478354,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.039153454088478354\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7355371900826446,\n \"acc_stderr\": 0.040261875275912073,\n \"acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.040261875275912073\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7184466019417476,\n \"acc_stderr\": 0.04453254836326466,\n \"acc_norm\": 0.7184466019417476,\n \"acc_norm_stderr\": 0.04453254836326466\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489294,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489294\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 
0.04760952285695237\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7598978288633461,\n \"acc_stderr\": 0.015274685213734198,\n \"acc_norm\": 0.7598978288633461,\n \"acc_norm_stderr\": 0.015274685213734198\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.025624723994030454,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.025624723994030454\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.35195530726256985,\n \"acc_stderr\": 0.01597266852368907,\n \"acc_norm\": 0.35195530726256985,\n \"acc_norm_stderr\": 0.01597266852368907\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.02724561304721535,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.02724561304721535\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6655948553054662,\n \"acc_stderr\": 0.026795422327893937,\n \"acc_norm\": 0.6655948553054662,\n \"acc_norm_stderr\": 0.026795422327893937\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6604938271604939,\n \"acc_stderr\": 0.026348564412011624,\n \"acc_norm\": 0.6604938271604939,\n \"acc_norm_stderr\": 0.026348564412011624\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4326241134751773,\n \"acc_stderr\": 0.029555454236778852,\n \"acc_norm\": 0.4326241134751773,\n \"acc_norm_stderr\": 0.029555454236778852\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41851368970013036,\n \"acc_stderr\": 0.012599505608336461,\n \"acc_norm\": 0.41851368970013036,\n \"acc_norm_stderr\": 0.012599505608336461\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.02976826352893311,\n \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.02976826352893311\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6062091503267973,\n \"acc_stderr\": 0.019766211991073063,\n \"acc_norm\": 0.6062091503267973,\n \"acc_norm_stderr\": 0.019766211991073063\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6467661691542289,\n \"acc_stderr\": 0.03379790611796777,\n \"acc_norm\": 0.6467661691542289,\n \"acc_norm_stderr\": 0.03379790611796777\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368043,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368043\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4773561811505508,\n \"mc1_stderr\": 0.017485542258489646,\n \"mc2\": 0.6407684674777628,\n \"mc2_stderr\": 0.015297982301051796\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7845303867403315,\n \"acc_stderr\": 0.011555295286059282\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.36997725549658833,\n \"acc_stderr\": 0.013298661207727129\n }\n}\n```", "repo_url": "https://huggingface.co/robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-51-45.656296.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-51-45.656296.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-51-45.656296.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-51-45.656296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-51-45.656296.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["**/details_harness|winogrande|5_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-22T17-51-45.656296.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T17_51_45.656296", "path": ["results_2024-01-22T17-51-45.656296.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T17-51-45.656296.parquet"]}]}]}
2024-01-22T17:54:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2 Dataset automatically created during the evaluation run of model robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T17:51:45.656296 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2\n\n\n\nDataset automatically created during the evaluation run of model robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:51:45.656296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2\n\n\n\nDataset automatically created during the evaluation run of model robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpaca-DPO2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:51:45.656296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
35e50dd8a0eadd829d51b4001a1ae5a891ead95b
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T04:42:00.925654](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1/blob/main/results_2024-01-23T04-42-00.925654.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6482345665656519, "acc_stderr": 0.0322653053904102, "acc_norm": 0.647516919767443, "acc_norm_stderr": 0.032939434760510346, "mc1": 0.6034271725826194, "mc1_stderr": 0.017124930942023515, "mc2": 0.7279172395260135, "mc2_stderr": 0.014793132788677908 }, "harness|arc:challenge|25": { "acc": 0.7192832764505119, "acc_stderr": 0.01313123812697558, "acc_norm": 0.7397610921501706, "acc_norm_stderr": 0.012821930225112571 }, "harness|hellaswag|10": { "acc": 0.7394941246763593, "acc_stderr": 0.004380136468543941, "acc_norm": 0.8926508663612827, "acc_norm_stderr": 0.0030892396746331585 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.041539484047423976, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.041539484047423976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544067, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544067 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { 
"acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266344, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266344 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909284, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406776, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406776 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356852, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356852 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02874204090394848, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02874204090394848 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.01584825580650155, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.01584825580650155 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601446, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.038498560987940904, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.038498560987940904 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.046695106638751906, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.046695106638751906 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165612, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165612 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903341, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903341 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.02433214677913413, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.43798882681564244, "acc_stderr": 0.016593394227564843, "acc_norm": 0.43798882681564244, "acc_norm_stderr": 0.016593394227564843 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.024477222856135114, 
"acc_norm": 0.7376543209876543, "acc_norm_stderr": 0.024477222856135114 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4680573663624511, "acc_stderr": 0.012744149704869649, "acc_norm": 0.4680573663624511, "acc_norm_stderr": 0.012744149704869649 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389845, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389845 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495144, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495144 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291293, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291293 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8208955223880597, "acc_stderr": 0.027113286753111837, "acc_norm": 0.8208955223880597, "acc_norm_stderr": 0.027113286753111837 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699121, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.6034271725826194, "mc1_stderr": 0.017124930942023515, "mc2": 0.7279172395260135, "mc2_stderr": 0.014793132788677908 }, "harness|winogrande|5": { "acc": 0.8445146014206788, "acc_stderr": 0.010184308214775778 }, "harness|gsm8k|5": { "acc": 0.6679302501895376, "acc_stderr": 0.012972465034361861 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
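Since the template sections above are left as placeholders, here is a minimal, illustrative sketch of how the evaluation details can be loaded and compared across the two recorded runs. It is not part of the original card: the repository, configuration, and split names are copied from the file listing in the metadata below, and may differ for other evaluation runs.

```python
from datasets import load_dataset

# Repository holding the evaluation details for this model (from the card above).
REPO = "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1"

# Aggregated metrics for the most recent run; "results" and "latest" follow the
# config/split naming shown in the metadata listing below.
results = load_dataset(REPO, "results", split="latest")

# Per-example details for a single task, e.g. the 5-shot Winogrande harness.
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")

# The two timestamped splits correspond to the two evaluation runs recorded in
# this repository, so they can be loaded side by side to compare runs.
run_old = load_dataset(REPO, "harness_gsm8k_5", split="2024_01_22T17_55_45.575656")
run_new = load_dataset(REPO, "harness_gsm8k_5", split="2024_01_23T04_42_00.925654")
print(len(run_old), len(run_new))
```

The per-task splits typically contain one row per evaluated example (prompt, target, model predictions and per-example metrics), while the `results` configuration mirrors the aggregated JSON block shown earlier in the card.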
open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1
[ "region:us" ]
2024-01-22T17:58:05+00:00
{"pretty_name": "Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1", "dataset_summary": "Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T04:42:00.925654](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1/blob/main/results_2024-01-23T04-42-00.925654.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6482345665656519,\n \"acc_stderr\": 0.0322653053904102,\n \"acc_norm\": 0.647516919767443,\n \"acc_norm_stderr\": 0.032939434760510346,\n \"mc1\": 0.6034271725826194,\n \"mc1_stderr\": 0.017124930942023515,\n \"mc2\": 0.7279172395260135,\n \"mc2_stderr\": 0.014793132788677908\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7192832764505119,\n \"acc_stderr\": 0.01313123812697558,\n \"acc_norm\": 0.7397610921501706,\n \"acc_norm_stderr\": 0.012821930225112571\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7394941246763593,\n \"acc_stderr\": 0.004380136468543941,\n \"acc_norm\": 0.8926508663612827,\n \"acc_norm_stderr\": 0.0030892396746331585\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.041539484047423976,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.041539484047423976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544067,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544067\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 
0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266344,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266344\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406776,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406776\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n 
\"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.02874204090394848,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.02874204090394848\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601446,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.038498560987940904,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.038498560987940904\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.046695106638751906,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.046695106638751906\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165612,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165612\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.013586619219903341,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903341\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.43798882681564244,\n \"acc_stderr\": 0.016593394227564843,\n \"acc_norm\": 0.43798882681564244,\n \"acc_norm_stderr\": 0.016593394227564843\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135114,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135114\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4680573663624511,\n \"acc_stderr\": 0.012744149704869649,\n \"acc_norm\": 0.4680573663624511,\n \"acc_norm_stderr\": 0.012744149704869649\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495144,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495144\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291293,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291293\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8208955223880597,\n \"acc_stderr\": 0.027113286753111837,\n \"acc_norm\": 0.8208955223880597,\n \"acc_norm_stderr\": 0.027113286753111837\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6034271725826194,\n \"mc1_stderr\": 0.017124930942023515,\n \"mc2\": 0.7279172395260135,\n \"mc2_stderr\": 0.014793132788677908\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8445146014206788,\n \"acc_stderr\": 0.010184308214775778\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6679302501895376,\n 
\"acc_stderr\": 0.012972465034361861\n }\n}\n```", "repo_url": "https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-55-45.575656.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-55-45.575656.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-42-00.925654.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-42-00.925654.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-42-00.925654.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-42-00.925654.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-42-00.925654.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": 
"2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-55-45.575656.parquet"]}, 
{"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["**/details_harness|winogrande|5_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": ["**/details_harness|winogrande|5_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T04-42-00.925654.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T17_55_45.575656", "path": ["results_2024-01-22T17-55-45.575656.parquet"]}, {"split": "2024_01_23T04_42_00.925654", "path": 
["results_2024-01-23T04-42-00.925654.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T04-42-00.925654.parquet"]}]}]}
2024-01-23T04:44:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1 Dataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T04:42:00.925654 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
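A minimal sketch of the loading call referenced in the card text above, which is not reproduced in this processed record; it assumes the repository follows the usual open-llm-leaderboard `details_<org>__<model>` naming scheme and that `harness_winogrande_5` is one of the listed configurations:

```python
from datasets import load_dataset

# Assumed repository name (standard open-llm-leaderboard details naming scheme);
# "harness_winogrande_5" is one of the 63 configurations named in the card above.
data = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e1",
    "harness_winogrande_5",
    split="train",
)
```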
[ "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:42:00.925654(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:42:00.925654(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
42e9bf45619e33494b77c9c9879079bdb68d99df
# Dataset Card for Evaluation run of ibivibiv/athene-noctua-13b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ibivibiv/athene-noctua-13b](https://huggingface.co/ibivibiv/athene-noctua-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ibivibiv__athene-noctua-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T17:56:03.589917](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__athene-noctua-13b/blob/main/results_2024-01-22T17-56-03.589917.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5559583776204916, "acc_stderr": 0.03379276005294812, "acc_norm": 0.5636396685887931, "acc_norm_stderr": 0.03454138986630861, "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012055, "mc2": 0.4749199462924083, "mc2_stderr": 0.015249125483447337 }, "harness|arc:challenge|25": { "acc": 0.5401023890784983, "acc_stderr": 0.01456431885692485, "acc_norm": 0.5716723549488054, "acc_norm_stderr": 0.014460496367599017 }, "harness|hellaswag|10": { "acc": 0.6183031268671579, "acc_stderr": 0.0048480996616197, "acc_norm": 0.8151762597092213, "acc_norm_stderr": 0.0038736123391606555 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5328947368421053, "acc_stderr": 0.040601270352363966, "acc_norm": 0.5328947368421053, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6188679245283019, "acc_stderr": 0.029890609686286637, "acc_norm": 0.6188679245283019, "acc_norm_stderr": 0.029890609686286637 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5763888888888888, "acc_stderr": 0.041321250197233685, "acc_norm": 0.5763888888888888, "acc_norm_stderr": 0.041321250197233685 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.041618085035015295, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.041618085035015295 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3201058201058201, "acc_stderr": 0.024026846392873506, "acc_norm": 0.3201058201058201, "acc_norm_stderr": 0.024026846392873506 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5935483870967742, "acc_stderr": 0.027941727346256308, "acc_norm": 0.5935483870967742, "acc_norm_stderr": 0.027941727346256308 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.03501438706296781, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.702020202020202, "acc_stderr": 0.03258630383836556, "acc_norm": 0.702020202020202, "acc_norm_stderr": 0.03258630383836556 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7823834196891192, "acc_stderr": 0.029778663037752954, "acc_norm": 0.7823834196891192, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.558974358974359, "acc_stderr": 0.025174048384000745, "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.025174048384000745 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.02742001935094527, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.02742001935094527 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.03221943636566196, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.03221943636566196 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 
0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7504587155963303, "acc_stderr": 0.018553897629501624, "acc_norm": 0.7504587155963303, "acc_norm_stderr": 0.018553897629501624 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653063, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653063 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.02977177522814563, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.02977177522814563 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.032100621541349864, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.032100621541349864 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6183206106870229, "acc_stderr": 0.042607351576445594, "acc_norm": 0.6183206106870229, "acc_norm_stderr": 0.042607351576445594 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.04284467968052194, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.04284467968052194 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.656441717791411, "acc_stderr": 0.03731133519673893, "acc_norm": 0.656441717791411, "acc_norm_stderr": 0.03731133519673893 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.046695106638751906, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.046695106638751906 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.02777883590493543, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.02777883590493543 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.62, "acc_stderr": 0.04878317312145634, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145634 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.735632183908046, "acc_stderr": 0.01576998484069052, "acc_norm": 0.735632183908046, "acc_norm_stderr": 0.01576998484069052 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6271676300578035, "acc_stderr": 0.026033890613576277, "acc_norm": 0.6271676300578035, "acc_norm_stderr": 0.026033890613576277 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4111731843575419, "acc_stderr": 0.01645649803397752, "acc_norm": 0.4111731843575419, "acc_norm_stderr": 0.01645649803397752 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5947712418300654, "acc_stderr": 0.028110928492809068, "acc_norm": 0.5947712418300654, "acc_norm_stderr": 0.028110928492809068 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.639871382636656, "acc_stderr": 0.027264297599804015, "acc_norm": 0.639871382636656, "acc_norm_stderr": 0.027264297599804015 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6419753086419753, "acc_stderr": 0.02667561192603709, "acc_norm": 0.6419753086419753, "acc_norm_stderr": 0.02667561192603709 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.029144544781596147, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.029144544781596147 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41851368970013036, "acc_stderr": 0.01259950560833646, "acc_norm": 0.41851368970013036, "acc_norm_stderr": 0.01259950560833646 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5514705882352942, "acc_stderr": 0.030211479609121596, "acc_norm": 0.5514705882352942, "acc_norm_stderr": 0.030211479609121596 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5604575163398693, "acc_stderr": 0.020079420408087918, "acc_norm": 0.5604575163398693, "acc_norm_stderr": 0.020079420408087918 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6489795918367347, "acc_stderr": 0.030555316755573637, "acc_norm": 0.6489795918367347, "acc_norm_stderr": 0.030555316755573637 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7014925373134329, "acc_stderr": 0.032357437893550424, "acc_norm": 0.7014925373134329, "acc_norm_stderr": 0.032357437893550424 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.038743715565879536, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.038743715565879536 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.031885780176863984, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.031885780176863984 }, "harness|truthfulqa:mc|0": { "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012055, "mc2": 0.4749199462924083, "mc2_stderr": 0.015249125483447337 }, "harness|winogrande|5": { "acc": 0.734017363851618, "acc_stderr": 0.012418323153051044 }, "harness|gsm8k|5": { "acc": 0.15314632297194844, "acc_stderr": 0.009919728152791473 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_ibivibiv__athene-noctua-13b
[ "region:us" ]
2024-01-22T17:58:22+00:00
{"pretty_name": "Evaluation run of ibivibiv/athene-noctua-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ibivibiv/athene-noctua-13b](https://huggingface.co/ibivibiv/athene-noctua-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ibivibiv__athene-noctua-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T17:56:03.589917](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__athene-noctua-13b/blob/main/results_2024-01-22T17-56-03.589917.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5559583776204916,\n \"acc_stderr\": 0.03379276005294812,\n \"acc_norm\": 0.5636396685887931,\n \"acc_norm_stderr\": 0.03454138986630861,\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012055,\n \"mc2\": 0.4749199462924083,\n \"mc2_stderr\": 0.015249125483447337\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5401023890784983,\n \"acc_stderr\": 0.01456431885692485,\n \"acc_norm\": 0.5716723549488054,\n \"acc_norm_stderr\": 0.014460496367599017\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6183031268671579,\n \"acc_stderr\": 0.0048480996616197,\n \"acc_norm\": 0.8151762597092213,\n \"acc_norm_stderr\": 0.0038736123391606555\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5328947368421053,\n \"acc_stderr\": 0.040601270352363966,\n \"acc_norm\": 0.5328947368421053,\n \"acc_norm_stderr\": 0.040601270352363966\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286637,\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5763888888888888,\n \"acc_stderr\": 0.041321250197233685,\n \"acc_norm\": 0.5763888888888888,\n \"acc_norm_stderr\": 0.041321250197233685\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 
0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.43829787234042555,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.43829787234042555,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.044895393502706986,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.044895393502706986\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.041618085035015295,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.041618085035015295\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3201058201058201,\n \"acc_stderr\": 0.024026846392873506,\n \"acc_norm\": 0.3201058201058201,\n \"acc_norm_stderr\": 0.024026846392873506\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5935483870967742,\n \"acc_stderr\": 0.027941727346256308,\n \"acc_norm\": 0.5935483870967742,\n \"acc_norm_stderr\": 0.027941727346256308\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4187192118226601,\n \"acc_stderr\": 0.03471192860518468,\n \"acc_norm\": 0.4187192118226601,\n \"acc_norm_stderr\": 0.03471192860518468\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.702020202020202,\n \"acc_stderr\": 0.03258630383836556,\n \"acc_norm\": 0.702020202020202,\n \"acc_norm_stderr\": 0.03258630383836556\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7823834196891192,\n \"acc_stderr\": 0.029778663037752954,\n \"acc_norm\": 0.7823834196891192,\n \"acc_norm_stderr\": 0.029778663037752954\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.558974358974359,\n \"acc_stderr\": 0.025174048384000745,\n \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.025174048384000745\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2814814814814815,\n \"acc_stderr\": 0.02742001935094527,\n \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.02742001935094527\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5630252100840336,\n \"acc_stderr\": 0.03221943636566196,\n \"acc_norm\": 0.5630252100840336,\n \"acc_norm_stderr\": 0.03221943636566196\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7504587155963303,\n \"acc_stderr\": 0.018553897629501624,\n \"acc_norm\": 0.7504587155963303,\n \"acc_norm_stderr\": 0.018553897629501624\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653063,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653063\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.02977177522814563,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.02977177522814563\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n \"acc_stderr\": 0.032100621541349864,\n \"acc_norm\": 0.6457399103139013,\n \"acc_norm_stderr\": 0.032100621541349864\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6183206106870229,\n \"acc_stderr\": 0.042607351576445594,\n \"acc_norm\": 0.6183206106870229,\n \"acc_norm_stderr\": 0.042607351576445594\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7107438016528925,\n \"acc_stderr\": 0.041391127276354626,\n \"acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.041391127276354626\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.04284467968052194,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.04284467968052194\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.656441717791411,\n \"acc_stderr\": 0.03731133519673893,\n \"acc_norm\": 0.656441717791411,\n \"acc_norm_stderr\": 0.03731133519673893\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.046695106638751906,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.046695106638751906\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.02777883590493543,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.02777883590493543\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145634,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145634\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.735632183908046,\n \"acc_stderr\": 0.01576998484069052,\n \"acc_norm\": 
0.735632183908046,\n \"acc_norm_stderr\": 0.01576998484069052\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6271676300578035,\n \"acc_stderr\": 0.026033890613576277,\n \"acc_norm\": 0.6271676300578035,\n \"acc_norm_stderr\": 0.026033890613576277\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4111731843575419,\n \"acc_stderr\": 0.01645649803397752,\n \"acc_norm\": 0.4111731843575419,\n \"acc_norm_stderr\": 0.01645649803397752\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5947712418300654,\n \"acc_stderr\": 0.028110928492809068,\n \"acc_norm\": 0.5947712418300654,\n \"acc_norm_stderr\": 0.028110928492809068\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.639871382636656,\n \"acc_stderr\": 0.027264297599804015,\n \"acc_norm\": 0.639871382636656,\n \"acc_norm_stderr\": 0.027264297599804015\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6419753086419753,\n \"acc_stderr\": 0.02667561192603709,\n \"acc_norm\": 0.6419753086419753,\n \"acc_norm_stderr\": 0.02667561192603709\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596147,\n \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596147\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41851368970013036,\n \"acc_stderr\": 0.01259950560833646,\n \"acc_norm\": 0.41851368970013036,\n \"acc_norm_stderr\": 0.01259950560833646\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5514705882352942,\n \"acc_stderr\": 0.030211479609121596,\n \"acc_norm\": 0.5514705882352942,\n \"acc_norm_stderr\": 0.030211479609121596\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5604575163398693,\n \"acc_stderr\": 0.020079420408087918,\n \"acc_norm\": 0.5604575163398693,\n \"acc_norm_stderr\": 0.020079420408087918\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6489795918367347,\n \"acc_stderr\": 0.030555316755573637,\n \"acc_norm\": 0.6489795918367347,\n \"acc_norm_stderr\": 0.030555316755573637\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7014925373134329,\n \"acc_stderr\": 0.032357437893550424,\n \"acc_norm\": 0.7014925373134329,\n \"acc_norm_stderr\": 0.032357437893550424\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.038743715565879536,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.038743715565879536\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.031885780176863984,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.031885780176863984\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012055,\n \"mc2\": 0.4749199462924083,\n \"mc2_stderr\": 0.015249125483447337\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.734017363851618,\n \"acc_stderr\": 0.012418323153051044\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.15314632297194844,\n \"acc_stderr\": 0.009919728152791473\n }\n}\n```", "repo_url": 
"https://huggingface.co/ibivibiv/athene-noctua-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-56-03.589917.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-56-03.589917.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-56-03.589917.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T17-56-03.589917.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-56-03.589917.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T17_56_03.589917", "path": ["**/details_harness|winogrande|5_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T17-56-03.589917.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_22T17_56_03.589917", "path": ["results_2024-01-22T17-56-03.589917.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T17-56-03.589917.parquet"]}]}]}
2024-01-22T17:58:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ibivibiv/athene-noctua-13b Dataset automatically created during the evaluation run of model ibivibiv/athene-noctua-13b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T17:56:03.589917 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
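The card above refers to a load snippet without showing it; a minimal sketch is given below, assuming the repository follows the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the other cards in this collection. The config name and the "latest" split are taken from the config metadata that accompanies this card.

```python
from datasets import load_dataset

# Assumed repository name, following the details_<org>__<model> convention;
# "harness_winogrande_5" and the "latest" split appear in the config
# metadata accompanying this card.
data = load_dataset(
    "open-llm-leaderboard/details_ibivibiv__athene-noctua-13b",
    "harness_winogrande_5",
    split="latest",
)
```

Any other config name listed in the metadata (for example `harness_hendrycksTest_world_religions_5`) can be substituted for the second argument.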
[ "# Dataset Card for Evaluation run of ibivibiv/athene-noctua-13b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/athene-noctua-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:56:03.589917(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ibivibiv/athene-noctua-13b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/athene-noctua-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T17:56:03.589917(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
a53cdc51ced29ec20b7ae0576d97ffdb9498ccc7
# Dataset Card for Evaluation run of AA051612/B0122 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [AA051612/B0122](https://huggingface.co/AA051612/B0122) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AA051612__B0122", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T18:14:13.351822](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__B0122/blob/main/results_2024-01-22T18-14-13.351822.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.8018093992937142, "acc_stderr": 0.025943636891982935, "acc_norm": 0.8135255711326126, "acc_norm_stderr": 0.026390358482713747, "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.581997849772674, "mc2_stderr": 0.01539700975904714 }, "harness|arc:challenge|25": { "acc": 0.643344709897611, "acc_stderr": 0.013998056902620194, "acc_norm": 0.6791808873720137, "acc_norm_stderr": 0.01364094309194653 }, "harness|hellaswag|10": { "acc": 0.6594303923521211, "acc_stderr": 0.004729322613301549, "acc_norm": 0.8492332204740092, "acc_norm_stderr": 0.0035709011883580744 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.8222222222222222, "acc_stderr": 0.03302789859901717, "acc_norm": 0.8222222222222222, "acc_norm_stderr": 0.03302789859901717 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.868421052631579, "acc_stderr": 0.027508689533549912, "acc_norm": 0.868421052631579, "acc_norm_stderr": 0.027508689533549912 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.81, "acc_stderr": 0.03942772444036623, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8679245283018868, "acc_stderr": 0.020837715430694004, "acc_norm": 0.8679245283018868, "acc_norm_stderr": 0.020837715430694004 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9305555555555556, "acc_stderr": 0.021257974822832048, "acc_norm": 0.9305555555555556, "acc_norm_stderr": 0.021257974822832048 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.6, 
"acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7745664739884393, "acc_stderr": 0.03186209851641144, "acc_norm": 0.7745664739884393, "acc_norm_stderr": 0.03186209851641144 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.6568627450980392, "acc_stderr": 0.04724007352383888, "acc_norm": 0.6568627450980392, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.8212765957446808, "acc_stderr": 0.025045373272050978, "acc_norm": 0.8212765957446808, "acc_norm_stderr": 0.025045373272050978 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6491228070175439, "acc_stderr": 0.04489539350270698, "acc_norm": 0.6491228070175439, "acc_norm_stderr": 0.04489539350270698 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.8068965517241379, "acc_stderr": 0.03289445522127403, "acc_norm": 0.8068965517241379, "acc_norm_stderr": 0.03289445522127403 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7645502645502645, "acc_stderr": 0.021851509822031722, "acc_norm": 0.7645502645502645, "acc_norm_stderr": 0.021851509822031722 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5793650793650794, "acc_stderr": 0.04415438226743745, "acc_norm": 0.5793650793650794, "acc_norm_stderr": 0.04415438226743745 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9193548387096774, "acc_stderr": 0.015490002961591035, "acc_norm": 0.9193548387096774, "acc_norm_stderr": 0.015490002961591035 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6945812807881774, "acc_stderr": 0.03240661565868408, "acc_norm": 0.6945812807881774, "acc_norm_stderr": 0.03240661565868408 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.85, "acc_stderr": 0.035887028128263714, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263714 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.9212121212121213, "acc_stderr": 0.021037183825716364, "acc_norm": 0.9212121212121213, "acc_norm_stderr": 0.021037183825716364 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9444444444444444, "acc_stderr": 0.0163199507007674, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.0163199507007674 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.01146452335695318, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.01146452335695318 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8717948717948718, "acc_stderr": 0.016950599120913946, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.016950599120913946 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.5481481481481482, "acc_stderr": 0.030343862998512633, "acc_norm": 0.5481481481481482, "acc_norm_stderr": 0.030343862998512633 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.9033613445378151, "acc_stderr": 0.019192520709708727, "acc_norm": 0.9033613445378151, "acc_norm_stderr": 0.019192520709708727 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5894039735099338, "acc_stderr": 0.04016689594849929, "acc_norm": 0.5894039735099338, "acc_norm_stderr": 
0.04016689594849929 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9376146788990826, "acc_stderr": 0.01036940784904347, "acc_norm": 0.9376146788990826, "acc_norm_stderr": 0.01036940784904347 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.7361111111111112, "acc_stderr": 0.030058202704309846, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.030058202704309846 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9362745098039216, "acc_stderr": 0.01714392165552496, "acc_norm": 0.9362745098039216, "acc_norm_stderr": 0.01714392165552496 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9324894514767933, "acc_stderr": 0.016332466673244395, "acc_norm": 0.9324894514767933, "acc_norm_stderr": 0.016332466673244395 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8251121076233184, "acc_stderr": 0.02549528462644497, "acc_norm": 0.8251121076233184, "acc_norm_stderr": 0.02549528462644497 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.9236641221374046, "acc_stderr": 0.02328893953617374, "acc_norm": 0.9236641221374046, "acc_norm_stderr": 0.02328893953617374 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9256198347107438, "acc_stderr": 0.023952688836676752, "acc_norm": 0.9256198347107438, "acc_norm_stderr": 0.023952688836676752 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.9259259259259259, "acc_stderr": 0.025317997297209734, "acc_norm": 0.9259259259259259, "acc_norm_stderr": 0.025317997297209734 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.901840490797546, "acc_stderr": 0.023376180231059602, "acc_norm": 0.901840490797546, "acc_norm_stderr": 0.023376180231059602 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6517857142857143, "acc_stderr": 0.045218299028335865, "acc_norm": 0.6517857142857143, "acc_norm_stderr": 0.045218299028335865 }, "harness|hendrycksTest-management|5": { "acc": 0.9223300970873787, "acc_stderr": 0.026501440784762752, "acc_norm": 0.9223300970873787, "acc_norm_stderr": 0.026501440784762752 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9743589743589743, "acc_stderr": 0.010354979197709018, "acc_norm": 0.9743589743589743, "acc_norm_stderr": 0.010354979197709018 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.92, "acc_stderr": 0.027265992434429086, "acc_norm": 0.92, "acc_norm_stderr": 0.027265992434429086 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9374201787994891, "acc_stderr": 0.00866125712060537, "acc_norm": 0.9374201787994891, "acc_norm_stderr": 0.00866125712060537 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.846820809248555, "acc_stderr": 0.019390370108969934, "acc_norm": 0.846820809248555, "acc_norm_stderr": 0.019390370108969934 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.7899441340782123, "acc_stderr": 0.013623755371333531, "acc_norm": 0.7899441340782123, "acc_norm_stderr": 0.013623755371333531 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8823529411764706, "acc_stderr": 0.018448530829034672, "acc_norm": 0.8823529411764706, "acc_norm_stderr": 0.018448530829034672 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8810289389067524, "acc_stderr": 0.018388017461905378, "acc_norm": 0.8810289389067524, "acc_norm_stderr": 0.018388017461905378 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8734567901234568, "acc_stderr": 0.01849860055879091, "acc_norm": 0.8734567901234568, "acc_norm_stderr": 0.01849860055879091 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.7340425531914894, "acc_stderr": 
0.02635806569888059, "acc_norm": 0.7340425531914894, "acc_norm_stderr": 0.02635806569888059 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.7183833116036505, "acc_stderr": 0.0114877832727867, "acc_norm": 0.7183833116036505, "acc_norm_stderr": 0.0114877832727867 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.9007352941176471, "acc_stderr": 0.018163995046407498, "acc_norm": 0.9007352941176471, "acc_norm_stderr": 0.018163995046407498 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8676470588235294, "acc_stderr": 0.013709377734592321, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.013709377734592321 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03955932861795833, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8489795918367347, "acc_stderr": 0.022923004094736847, "acc_norm": 0.8489795918367347, "acc_norm_stderr": 0.022923004094736847 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9253731343283582, "acc_stderr": 0.01858193969849063, "acc_norm": 0.9253731343283582, "acc_norm_stderr": 0.01858193969849063 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.96, "acc_stderr": 0.01969463855669321, "acc_norm": 0.96, "acc_norm_stderr": 0.01969463855669321 }, "harness|hendrycksTest-virology|5": { "acc": 0.6204819277108434, "acc_stderr": 0.037777988227480165, "acc_norm": 0.6204819277108434, "acc_norm_stderr": 0.037777988227480165 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.9298245614035088, "acc_stderr": 0.019591541754525123, "acc_norm": 0.9298245614035088, "acc_norm_stderr": 0.019591541754525123 }, "harness|truthfulqa:mc|0": { "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.581997849772674, "mc2_stderr": 0.01539700975904714 }, "harness|winogrande|5": { "acc": 0.8082083662194159, "acc_stderr": 0.011065209664659527 }, "harness|gsm8k|5": { "acc": 0.3297952994692949, "acc_stderr": 0.01294995503057115 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
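The aggregated numbers shown in the `all` block above are also stored in the "results" configuration that the card mentions; a minimal sketch of reading them back follows, reusing the repository name from the snippet above and assuming the "results" config exposes a "latest" split in the same way as the metadata shown for the previous card. The column layout of the results file is not shown here, so inspect the loaded rows rather than assuming a schema.

```python
from datasets import load_dataset

# "results" holds the aggregated metrics of the run; the "latest" split is
# assumed to point at the most recent results file, as in the other cards.
results = load_dataset(
    "open-llm-leaderboard/details_AA051612__B0122",
    "results",
    split="latest",
)
print(results.column_names)  # inspect the schema before relying on it
print(results[0])
```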
open-llm-leaderboard/details_AA051612__B0122
[ "region:us" ]
2024-01-22T18:16:26+00:00
{"pretty_name": "Evaluation run of AA051612/B0122", "dataset_summary": "Dataset automatically created during the evaluation run of model [AA051612/B0122](https://huggingface.co/AA051612/B0122) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AA051612__B0122\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T18:14:13.351822](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__B0122/blob/main/results_2024-01-22T18-14-13.351822.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.8018093992937142,\n \"acc_stderr\": 0.025943636891982935,\n \"acc_norm\": 0.8135255711326126,\n \"acc_norm_stderr\": 0.026390358482713747,\n \"mc1\": 0.401468788249694,\n \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.581997849772674,\n \"mc2_stderr\": 0.01539700975904714\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.643344709897611,\n \"acc_stderr\": 0.013998056902620194,\n \"acc_norm\": 0.6791808873720137,\n \"acc_norm_stderr\": 0.01364094309194653\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6594303923521211,\n \"acc_stderr\": 0.004729322613301549,\n \"acc_norm\": 0.8492332204740092,\n \"acc_norm_stderr\": 0.0035709011883580744\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.8222222222222222,\n \"acc_stderr\": 0.03302789859901717,\n \"acc_norm\": 0.8222222222222222,\n \"acc_norm_stderr\": 0.03302789859901717\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.868421052631579,\n \"acc_stderr\": 0.027508689533549912,\n \"acc_norm\": 0.868421052631579,\n \"acc_norm_stderr\": 0.027508689533549912\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8679245283018868,\n \"acc_stderr\": 0.020837715430694004,\n \"acc_norm\": 0.8679245283018868,\n \"acc_norm_stderr\": 0.020837715430694004\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9305555555555556,\n \"acc_stderr\": 0.021257974822832048,\n \"acc_norm\": 0.9305555555555556,\n \"acc_norm_stderr\": 0.021257974822832048\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 
0.04975698519562428\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7745664739884393,\n \"acc_stderr\": 0.03186209851641144,\n \"acc_norm\": 0.7745664739884393,\n \"acc_norm_stderr\": 0.03186209851641144\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.6568627450980392,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.6568627450980392,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.8212765957446808,\n \"acc_stderr\": 0.025045373272050978,\n \"acc_norm\": 0.8212765957446808,\n \"acc_norm_stderr\": 0.025045373272050978\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6491228070175439,\n \"acc_stderr\": 0.04489539350270698,\n \"acc_norm\": 0.6491228070175439,\n \"acc_norm_stderr\": 0.04489539350270698\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.8068965517241379,\n \"acc_stderr\": 0.03289445522127403,\n \"acc_norm\": 0.8068965517241379,\n \"acc_norm_stderr\": 0.03289445522127403\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7645502645502645,\n \"acc_stderr\": 0.021851509822031722,\n \"acc_norm\": 0.7645502645502645,\n \"acc_norm_stderr\": 0.021851509822031722\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5793650793650794,\n \"acc_stderr\": 0.04415438226743745,\n \"acc_norm\": 0.5793650793650794,\n \"acc_norm_stderr\": 0.04415438226743745\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9193548387096774,\n \"acc_stderr\": 0.015490002961591035,\n \"acc_norm\": 0.9193548387096774,\n \"acc_norm_stderr\": 0.015490002961591035\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6945812807881774,\n \"acc_stderr\": 0.03240661565868408,\n \"acc_norm\": 0.6945812807881774,\n \"acc_norm_stderr\": 0.03240661565868408\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263714,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263714\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.9212121212121213,\n \"acc_stderr\": 0.021037183825716364,\n \"acc_norm\": 0.9212121212121213,\n \"acc_norm_stderr\": 0.021037183825716364\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.0163199507007674,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.0163199507007674\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.01146452335695318,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.01146452335695318\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 
0.016950599120913946,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.016950599120913946\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.5481481481481482,\n \"acc_stderr\": 0.030343862998512633,\n \"acc_norm\": 0.5481481481481482,\n \"acc_norm_stderr\": 0.030343862998512633\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.9033613445378151,\n \"acc_stderr\": 0.019192520709708727,\n \"acc_norm\": 0.9033613445378151,\n \"acc_norm_stderr\": 0.019192520709708727\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5894039735099338,\n \"acc_stderr\": 0.04016689594849929,\n \"acc_norm\": 0.5894039735099338,\n \"acc_norm_stderr\": 0.04016689594849929\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9376146788990826,\n \"acc_stderr\": 0.01036940784904347,\n \"acc_norm\": 0.9376146788990826,\n \"acc_norm_stderr\": 0.01036940784904347\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.030058202704309846,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.030058202704309846\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9362745098039216,\n \"acc_stderr\": 0.01714392165552496,\n \"acc_norm\": 0.9362745098039216,\n \"acc_norm_stderr\": 0.01714392165552496\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9324894514767933,\n \"acc_stderr\": 0.016332466673244395,\n \"acc_norm\": 0.9324894514767933,\n \"acc_norm_stderr\": 0.016332466673244395\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8251121076233184,\n \"acc_stderr\": 0.02549528462644497,\n \"acc_norm\": 0.8251121076233184,\n \"acc_norm_stderr\": 0.02549528462644497\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.9236641221374046,\n \"acc_stderr\": 0.02328893953617374,\n \"acc_norm\": 0.9236641221374046,\n \"acc_norm_stderr\": 0.02328893953617374\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.9256198347107438,\n \"acc_stderr\": 0.023952688836676752,\n \"acc_norm\": 0.9256198347107438,\n \"acc_norm_stderr\": 0.023952688836676752\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.9259259259259259,\n \"acc_stderr\": 0.025317997297209734,\n \"acc_norm\": 0.9259259259259259,\n \"acc_norm_stderr\": 0.025317997297209734\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.901840490797546,\n \"acc_stderr\": 0.023376180231059602,\n \"acc_norm\": 0.901840490797546,\n \"acc_norm_stderr\": 0.023376180231059602\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6517857142857143,\n \"acc_stderr\": 0.045218299028335865,\n \"acc_norm\": 0.6517857142857143,\n \"acc_norm_stderr\": 0.045218299028335865\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.9223300970873787,\n \"acc_stderr\": 0.026501440784762752,\n \"acc_norm\": 0.9223300970873787,\n \"acc_norm_stderr\": 0.026501440784762752\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9743589743589743,\n \"acc_stderr\": 0.010354979197709018,\n \"acc_norm\": 0.9743589743589743,\n \"acc_norm_stderr\": 0.010354979197709018\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.027265992434429086,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.027265992434429086\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9374201787994891,\n \"acc_stderr\": 0.00866125712060537,\n \"acc_norm\": 0.9374201787994891,\n 
\"acc_norm_stderr\": 0.00866125712060537\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.846820809248555,\n \"acc_stderr\": 0.019390370108969934,\n \"acc_norm\": 0.846820809248555,\n \"acc_norm_stderr\": 0.019390370108969934\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7899441340782123,\n \"acc_stderr\": 0.013623755371333531,\n \"acc_norm\": 0.7899441340782123,\n \"acc_norm_stderr\": 0.013623755371333531\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8823529411764706,\n \"acc_stderr\": 0.018448530829034672,\n \"acc_norm\": 0.8823529411764706,\n \"acc_norm_stderr\": 0.018448530829034672\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8810289389067524,\n \"acc_stderr\": 0.018388017461905378,\n \"acc_norm\": 0.8810289389067524,\n \"acc_norm_stderr\": 0.018388017461905378\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8734567901234568,\n \"acc_stderr\": 0.01849860055879091,\n \"acc_norm\": 0.8734567901234568,\n \"acc_norm_stderr\": 0.01849860055879091\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.7340425531914894,\n \"acc_stderr\": 0.02635806569888059,\n \"acc_norm\": 0.7340425531914894,\n \"acc_norm_stderr\": 0.02635806569888059\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.7183833116036505,\n \"acc_stderr\": 0.0114877832727867,\n \"acc_norm\": 0.7183833116036505,\n \"acc_norm_stderr\": 0.0114877832727867\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.9007352941176471,\n \"acc_stderr\": 0.018163995046407498,\n \"acc_norm\": 0.9007352941176471,\n \"acc_norm_stderr\": 0.018163995046407498\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.013709377734592321,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.013709377734592321\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.022923004094736847,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.022923004094736847\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9253731343283582,\n \"acc_stderr\": 0.01858193969849063,\n \"acc_norm\": 0.9253731343283582,\n \"acc_norm_stderr\": 0.01858193969849063\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.96,\n \"acc_stderr\": 0.01969463855669321,\n \"acc_norm\": 0.96,\n \"acc_norm_stderr\": 0.01969463855669321\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.6204819277108434,\n \"acc_stderr\": 0.037777988227480165,\n \"acc_norm\": 0.6204819277108434,\n \"acc_norm_stderr\": 0.037777988227480165\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.9298245614035088,\n \"acc_stderr\": 0.019591541754525123,\n \"acc_norm\": 0.9298245614035088,\n \"acc_norm_stderr\": 0.019591541754525123\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.401468788249694,\n \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.581997849772674,\n \"mc2_stderr\": 0.01539700975904714\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8082083662194159,\n \"acc_stderr\": 0.011065209664659527\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3297952994692949,\n \"acc_stderr\": 0.01294995503057115\n }\n}\n```", "repo_url": "https://huggingface.co/AA051612/B0122", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|arc:challenge|25_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|gsm8k|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hellaswag|10_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-14-13.351822.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-14-13.351822.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-14-13.351822.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T18-14-13.351822.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-14-13.351822.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-14-13.351822.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["**/details_harness|winogrande|5_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T18-14-13.351822.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T18_14_13.351822", "path": ["results_2024-01-22T18-14-13.351822.parquet"]}, {"split": "latest", "path": 
["results_2024-01-22T18-14-13.351822.parquet"]}]}]}
2024-01-22T18:16:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AA051612/B0122 Dataset automatically created during the evaluation run of model AA051612/B0122 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T18:14:13.351822 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
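The flattened card text above stops at "you can for instance do the following:" because the original code block was stripped when the text was flattened. A minimal sketch of that call, assuming the details repository follows the open-llm-leaderboard/details_<org>__<model> naming pattern used by the neighbouring cards in this dump (the exact repo name and config shown here are assumptions, not values taken from this record):

```python
from datasets import load_dataset

# Repo name assumed from the details_<org>__<model> pattern seen in the other
# cards; "harness_winogrande_5" is one of the per-task configs listed in the card.
data = load_dataset(
    "open-llm-leaderboard/details_AA051612__B0122",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```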
[ "# Dataset Card for Evaluation run of AA051612/B0122\n\n\n\nDataset automatically created during the evaluation run of model AA051612/B0122 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T18:14:13.351822(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AA051612/B0122\n\n\n\nDataset automatically created during the evaluation run of model AA051612/B0122 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T18:14:13.351822(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2c04bd17ea29cada9f45f86412d1530206d10cc0
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T18:17:22.033616](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m/blob/main/results_2024-01-22T18-17-22.033616.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6041987897040657, "acc_stderr": 0.03330812568997769, "acc_norm": 0.6090228717596228, "acc_norm_stderr": 0.03398555402708723, "mc1": 0.4700122399020808, "mc1_stderr": 0.01747199209169754, "mc2": 0.6377705503064354, "mc2_stderr": 0.015389580893710017 }, "harness|arc:challenge|25": { "acc": 0.5546075085324232, "acc_stderr": 0.014523987638344081, "acc_norm": 0.5947098976109215, "acc_norm_stderr": 0.014346869060229313 }, "harness|hellaswag|10": { "acc": 0.6367257518422625, "acc_stderr": 0.004799599840397376, "acc_norm": 0.8333997211710814, "acc_norm_stderr": 0.0037185707927195636 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5481481481481482, "acc_stderr": 0.04299268905480864, "acc_norm": 0.5481481481481482, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6052631578947368, "acc_stderr": 0.039777499346220734, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.039777499346220734 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5953757225433526, "acc_stderr": 0.03742461193887248, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5404255319148936, "acc_stderr": 0.032579014820998356, "acc_norm": 0.5404255319148936, "acc_norm_stderr": 0.032579014820998356 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044911984, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044911984 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.025107425481137285, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137285 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6838709677419355, "acc_stderr": 0.02645087448904277, "acc_norm": 0.6838709677419355, "acc_norm_stderr": 0.02645087448904277 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7393939393939394, "acc_stderr": 0.034277431758165236, "acc_norm": 0.7393939393939394, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.02614848346915332, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.02614848346915332 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5743589743589743, "acc_stderr": 0.025069094387296535, "acc_norm": 0.5743589743589743, "acc_norm_stderr": 0.025069094387296535 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857413, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857413 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6092436974789915, "acc_stderr": 0.031693802357129965, "acc_norm": 
0.6092436974789915, "acc_norm_stderr": 0.031693802357129965 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7926605504587156, "acc_stderr": 0.01738141556360868, "acc_norm": 0.7926605504587156, "acc_norm_stderr": 0.01738141556360868 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7401960784313726, "acc_stderr": 0.030778554678693257, "acc_norm": 0.7401960784313726, "acc_norm_stderr": 0.030778554678693257 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.032867453125679603, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.032867453125679603 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6793893129770993, "acc_stderr": 0.04093329229834278, "acc_norm": 0.6793893129770993, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260594, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260594 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.02250903393707779, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.02250903393707779 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7675606641123882, "acc_stderr": 0.015104550008905713, "acc_norm": 0.7675606641123882, "acc_norm_stderr": 0.015104550008905713 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6647398843930635, "acc_stderr": 0.025416003773165545, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.025416003773165545 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37206703910614525, "acc_stderr": 0.016165847583563295, "acc_norm": 0.37206703910614525, "acc_norm_stderr": 0.016165847583563295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6830065359477124, "acc_stderr": 0.026643278474508755, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.026643278474508755 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.02666441088693762, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.02666441088693762 }, "harness|hendrycksTest-prehistory|5": { "acc": 
0.6882716049382716, "acc_stderr": 0.025773111169630457, "acc_norm": 0.6882716049382716, "acc_norm_stderr": 0.025773111169630457 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 0.02973659252642444, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4198174706649283, "acc_stderr": 0.012604960816087378, "acc_norm": 0.4198174706649283, "acc_norm_stderr": 0.012604960816087378 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5882352941176471, "acc_stderr": 0.029896163033125474, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.029896163033125474 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6094771241830066, "acc_stderr": 0.019737008998094597, "acc_norm": 0.6094771241830066, "acc_norm_stderr": 0.019737008998094597 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4700122399020808, "mc1_stderr": 0.01747199209169754, "mc2": 0.6377705503064354, "mc2_stderr": 0.015389580893710017 }, "harness|winogrande|5": { "acc": 0.7647987371744278, "acc_stderr": 0.011920008163650865 }, "harness|gsm8k|5": { "acc": 0.3957543593631539, "acc_stderr": 0.013469823701048812 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m
[ "region:us" ]
2024-01-22T18:19:39+00:00
{"pretty_name": "Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m", "dataset_summary": "Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T18:17:22.033616](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m/blob/main/results_2024-01-22T18-17-22.033616.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6041987897040657,\n \"acc_stderr\": 0.03330812568997769,\n \"acc_norm\": 0.6090228717596228,\n \"acc_norm_stderr\": 0.03398555402708723,\n \"mc1\": 0.4700122399020808,\n \"mc1_stderr\": 0.01747199209169754,\n \"mc2\": 0.6377705503064354,\n \"mc2_stderr\": 0.015389580893710017\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5546075085324232,\n \"acc_stderr\": 0.014523987638344081,\n \"acc_norm\": 0.5947098976109215,\n \"acc_norm_stderr\": 0.014346869060229313\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6367257518422625,\n \"acc_stderr\": 0.004799599840397376,\n \"acc_norm\": 0.8333997211710814,\n \"acc_norm_stderr\": 0.0037185707927195636\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5481481481481482,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.5481481481481482,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.039777499346220734,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.039777499346220734\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n 
\"acc_norm_stderr\": 0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.032579014820998356,\n \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.032579014820998356\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.046774730044911984,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.046774730044911984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137285,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137285\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6838709677419355,\n \"acc_stderr\": 0.02645087448904277,\n \"acc_norm\": 0.6838709677419355,\n \"acc_norm_stderr\": 0.02645087448904277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.02614848346915332,\n 
\"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.02614848346915332\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5743589743589743,\n \"acc_stderr\": 0.025069094387296535,\n \"acc_norm\": 0.5743589743589743,\n \"acc_norm_stderr\": 0.025069094387296535\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857413,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857413\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6092436974789915,\n \"acc_stderr\": 0.031693802357129965,\n \"acc_norm\": 0.6092436974789915,\n \"acc_norm_stderr\": 0.031693802357129965\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7926605504587156,\n \"acc_stderr\": 0.01738141556360868,\n \"acc_norm\": 0.7926605504587156,\n \"acc_norm_stderr\": 0.01738141556360868\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7401960784313726,\n \"acc_stderr\": 0.030778554678693257,\n \"acc_norm\": 0.7401960784313726,\n \"acc_norm_stderr\": 0.030778554678693257\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.032867453125679603,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.032867453125679603\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6793893129770993,\n \"acc_stderr\": 0.04093329229834278,\n \"acc_norm\": 0.6793893129770993,\n \"acc_norm_stderr\": 0.04093329229834278\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260594,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260594\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.02250903393707779,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.02250903393707779\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7675606641123882,\n \"acc_stderr\": 0.015104550008905713,\n \"acc_norm\": 0.7675606641123882,\n \"acc_norm_stderr\": 0.015104550008905713\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.025416003773165545,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.025416003773165545\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37206703910614525,\n \"acc_stderr\": 0.016165847583563295,\n \"acc_norm\": 0.37206703910614525,\n \"acc_norm_stderr\": 0.016165847583563295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.026643278474508755,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.026643278474508755\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.02666441088693762,\n \"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.02666441088693762\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6882716049382716,\n \"acc_stderr\": 0.025773111169630457,\n \"acc_norm\": 0.6882716049382716,\n \"acc_norm_stderr\": 0.025773111169630457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4198174706649283,\n \"acc_stderr\": 0.012604960816087378,\n \"acc_norm\": 0.4198174706649283,\n \"acc_norm_stderr\": 0.012604960816087378\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.029896163033125474,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.029896163033125474\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6094771241830066,\n \"acc_stderr\": 0.019737008998094597,\n \"acc_norm\": 0.6094771241830066,\n \"acc_norm_stderr\": 0.019737008998094597\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4700122399020808,\n \"mc1_stderr\": 0.01747199209169754,\n \"mc2\": 0.6377705503064354,\n \"mc2_stderr\": 0.015389580893710017\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650865\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.3957543593631539,\n \"acc_stderr\": 0.013469823701048812\n }\n}\n```", "repo_url": "https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|arc:challenge|25_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|gsm8k|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hellaswag|10_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-17-22.033616.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-17-22.033616.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-17-22.033616.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T18-17-22.033616.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T18-17-22.033616.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["**/details_harness|winogrande|5_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-22T18-17-22.033616.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T18_17_22.033616", "path": ["results_2024-01-22T18-17-22.033616.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T18-17-22.033616.parquet"]}]}]}
2024-01-22T18:20:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m Dataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2024-01-22T18:17:22.033616 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
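A minimal loading sketch for the evaluation details described above. The details dataset id is not given in this record (only the model repo_url appears in the metadata), so the usual `open-llm-leaderboard/details_<org>__<model>` naming is assumed; the config and split names are taken from the configs listed in the metadata above.

```python
from datasets import load_dataset

# Assumed dataset id, following the open-llm-leaderboard naming convention;
# only the model repo_url appears in this record, not the details dataset id.
details = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-e2m",
    "harness_winogrande_5",  # any config listed in the metadata works
    split="latest",          # "latest" always points at the newest run
)
print(details)
```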
[ "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T18:17:22.033616(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-e2m on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T18:17:22.033616(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
9247f19b06c529c7caa35e165e204a557b90955f
# sample_tables
mkaushik/test_table
[ "region:us" ]
2024-01-22T18:30:40+00:00
{}
2024-01-22T18:32:55+00:00
[]
[]
TAGS #region-us
# sample_tables
[ "# sample_tables" ]
[ "TAGS\n#region-us \n", "# sample_tables" ]
6a005d0a3d539549cee6b96a59f7ebf16a0671ac
# wice WiCE: Real-World Entailment for Claims in Wikipedia ## Dataset Details ### Dataset Description WiCE is a fine-grained textual entailment dataset built on natural claim and evidence pairs extracted from Wikipedia. Given a sentence in Wikipedia and the corresponding article(s) it cites, we annotate the entailment label, a list of sentences in the cited article(s) that support the claim sentence, and tokens in the claim that are unsupported by the article(s). This is the `entailment_retrieval` subset that includes the WiCE dataset for the entailment and retrieval tasks. `claim` includes data with the original claims and `subclaim` includes data with the decomposed claims (fine-grained annotation using Claim-Split). ### Dataset Sources <!-- Provide the basic links for the dataset. --> - **Repository:** https://github.com/ryokamoi/wice - **Paper:** Kamoi, Goyal, Rodriguez, and Durrett (2023) [WiCE: Real-World Entailment for Claims in Wikipedia](https://arxiv.org/abs/2303.01432). ## Citation <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> ```bibtex @inproceedings{kamoi-etal-2023-wice, title = "{W}i{CE}: Real-World Entailment for Claims in {W}ikipedia", author = "Kamoi, Ryo and Goyal, Tanya and Rodriguez, Juan and Durrett, Greg", editor = "Bouamor, Houda and Pino, Juan and Bali, Kalika", booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing", month = dec, year = "2023", address = "Singapore", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/2023.emnlp-main.470", pages = "7561--7583", } ```
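A short loading sketch for the two configurations described above; the dataset id, config names, and split names are the ones listed in this card's metadata.

```python
from datasets import load_dataset

# "claim": original claims; "subclaim": decomposed claims (Claim-Split annotation).
claims = load_dataset("jon-tow/wice", "claim", split="validation")
subclaims = load_dataset("jon-tow/wice", "subclaim", split="validation")

print(len(claims), len(subclaims))
```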
jon-tow/wice
[ "license:odc-by", "arxiv:2303.01432", "region:us" ]
2024-01-22T18:55:32+00:00
{"license": "odc-by", "configs": [{"config_name": "claim", "data_files": [{"split": "train", "path": "data/claim_train.jsonl"}, {"split": "validation", "path": "data/claim_dev.jsonl"}, {"split": "test", "path": "data/claim_test.jsonl"}]}, {"config_name": "subclaim", "data_files": [{"split": "train", "path": "data/subclaim_train.jsonl"}, {"split": "validation", "path": "data/subclaim_dev.jsonl"}, {"split": "test", "path": "data/subclaim_test.jsonl"}]}]}
2024-01-22T20:16:00+00:00
[ "2303.01432" ]
[]
TAGS #license-odc-by #arxiv-2303.01432 #region-us
# wice WiCE: Real-World Entailment for Claims in Wikipedia ## Dataset Details ### Dataset Description WiCE is a fine-grained textual entailment dataset built on natural claim and evidence pairs extracted from Wikipedia. Given a sentence in Wikipedia and the corresponding article(s) it cites, we annotate the entailment label, a list of sentences in the cited article(s) that support the claim sentence, and tokens in the claim that are unsupported by the article(s). This is the 'entailment_retrieval' subset that includes the WiCE dataset for entailment and retrieval task. 'claim' includes data with the original claims and 'subclaim' includes data with the decomposed claims (finegrained annotation by using Claim-Split). ### Dataset Sources - Repository: URL - Paper: Kamoi, Goyal, Rodriguez, and Durett(2023) WiCE: Real-World Entailment for Claims in Wikipedia .
[ "# wice\n\nWiCE: Real-World Entailment for Claims in Wikipedia", "## Dataset Details", "### Dataset Description\n\nWiCE is a fine-grained textual entailment dataset built on natural claim and evidence pairs extracted from Wikipedia. Given a sentence in Wikipedia and the corresponding article(s) it cites, we annotate the entailment label, a list of sentences in the cited article(s) that support the claim sentence, and tokens in the claim that are unsupported by the article(s).\n\nThis is the 'entailment_retrieval' subset that includes the WiCE dataset for entailment and retrieval task. 'claim' includes data with the original claims and 'subclaim' includes data with the decomposed claims (finegrained annotation by using Claim-Split).", "### Dataset Sources\n\n\n\n- Repository: URL\n- Paper: Kamoi, Goyal, Rodriguez, and Durett(2023) WiCE: Real-World Entailment for Claims in Wikipedia\n." ]
[ "TAGS\n#license-odc-by #arxiv-2303.01432 #region-us \n", "# wice\n\nWiCE: Real-World Entailment for Claims in Wikipedia", "## Dataset Details", "### Dataset Description\n\nWiCE is a fine-grained textual entailment dataset built on natural claim and evidence pairs extracted from Wikipedia. Given a sentence in Wikipedia and the corresponding article(s) it cites, we annotate the entailment label, a list of sentences in the cited article(s) that support the claim sentence, and tokens in the claim that are unsupported by the article(s).\n\nThis is the 'entailment_retrieval' subset that includes the WiCE dataset for entailment and retrieval task. 'claim' includes data with the original claims and 'subclaim' includes data with the decomposed claims (finegrained annotation by using Claim-Split).", "### Dataset Sources\n\n\n\n- Repository: URL\n- Paper: Kamoi, Goyal, Rodriguez, and Durett(2023) WiCE: Real-World Entailment for Claims in Wikipedia\n." ]
c29655ec5d31268835333d5df45aae65773fea78
# Dataset Card for "agieval-math" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the MATH subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
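A minimal loading sketch for the card above; per this card's metadata there is a single `test` split of 1000 examples, each with `query` and `answer` string fields.

```python
from datasets import load_dataset

# One "test" split with 1000 examples, each a {"query", "answer"} pair.
agieval_math = load_dataset("hails/agieval-math", split="test")
print(agieval_math[0]["query"])
print(agieval_math[0]["answer"])
```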
hails/agieval-math
[ "arxiv:2304.06364", "region:us" ]
2024-01-22T19:21:05+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "answer", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 226532, "num_examples": 1000}], "download_size": 122070, "dataset_size": 226532}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:29:38+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-math" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the MATH subtask of AGIEval, as accessed in URL . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
[ "# Dataset Card for \"agieval-math\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the MATH subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-math\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the MATH subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
7532f5cf82baa2a545b0e73428f4f999c66db5e4
# Dataset Card for "uf_unsafe_v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yimingzhang/uf_unsafe_v2
[ "region:us" ]
2024-01-22T19:21:59+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_prefs", "path": "data/train_prefs-*"}, {"split": "test_prefs", "path": "data/test_prefs-*"}]}], "dataset_info": {"features": [{"name": "chosen", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "rejected", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train_prefs", "num_bytes": 15634, "num_examples": 32}, {"name": "test_prefs", "num_bytes": 82728, "num_examples": 172}], "download_size": 54937, "dataset_size": 98362}}
2024-01-22T19:22:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for "uf_unsafe_v2" More Information needed
[ "# Dataset Card for \"uf_unsafe_v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"uf_unsafe_v2\"\n\nMore Information needed" ]
10d014460c023efd8a6df83e46c0802594e2164c
# Dataset Card for "hub-report-dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Kevinger/hub-report-dataset
[ "region:us" ]
2024-01-22T19:25:32+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "valid", "path": "data/valid-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "score", "dtype": "float64"}, {"name": "title", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "business", "dtype": "int64"}, {"name": "crime", "dtype": "int64"}, {"name": "culture", "dtype": "int64"}, {"name": "entertainment", "dtype": "int64"}, {"name": "politics", "dtype": "int64"}, {"name": "science", "dtype": "int64"}, {"name": "sports", "dtype": "int64"}, {"name": "weather", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 6111039.522317189, "num_examples": 2211}, {"name": "test", "num_bytes": 1310100.7388414056, "num_examples": 474}, {"name": "valid", "num_bytes": 1310100.7388414056, "num_examples": 474}], "download_size": 5314452, "dataset_size": 8731241.0}}
2024-01-22T22:02:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "hub-report-dataset" More Information needed
[ "# Dataset Card for \"hub-report-dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"hub-report-dataset\"\n\nMore Information needed" ]
1026a2e18c09b8104c742381e46444656b6eb55e
# Dataset Card for "cai-conversation-dev1705951688" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vwxyzjn/cai-conversation-dev1705951688
[ "region:us" ]
2024-01-22T19:33:05+00:00
{"dataset_info": {"features": [{"name": "init_prompt", "dtype": "string"}, {"name": "init_response", "dtype": "string"}, {"name": "critic_prompt", "dtype": "string"}, {"name": "critic_response", "dtype": "string"}, {"name": "revision_prompt", "dtype": "string"}, {"name": "revision_response", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "chosen", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "rejected", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train_sft", "num_bytes": 197459, "num_examples": 64}, {"name": "train_prefs", "num_bytes": 194862, "num_examples": 64}, {"name": "test_sft", "num_bytes": 204318, "num_examples": 64}, {"name": "test_prefs", "num_bytes": 200773, "num_examples": 64}], "download_size": 433489, "dataset_size": 797412}, "configs": [{"config_name": "default", "data_files": [{"split": "train_sft", "path": "data/train_sft-*"}, {"split": "train_prefs", "path": "data/train_prefs-*"}, {"split": "test_sft", "path": "data/test_sft-*"}, {"split": "test_prefs", "path": "data/test_prefs-*"}]}]}
2024-01-22T19:33:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for "cai-conversation-dev1705951688" More Information needed
[ "# Dataset Card for \"cai-conversation-dev1705951688\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"cai-conversation-dev1705951688\"\n\nMore Information needed" ]
dd951a5b617f7cc14d77774433f5b02a086c57b6
# Dataset Card for "hub-report-classlabels" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Kevinger/hub-report-classlabels
[ "region:us" ]
2024-01-22T19:35:30+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "valid", "path": "data/valid-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "score", "dtype": "float64"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "business", "1": "crime", "2": "culture", "3": "entertainment", "4": "politics", "5": "science", "6": "sports", "7": "weather"}}}}, {"name": "title", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6068451, "num_examples": 2211}, {"name": "test", "num_bytes": 1197234, "num_examples": 474}, {"name": "valid", "num_bytes": 1288652, "num_examples": 474}], "download_size": 5225035, "dataset_size": 8554337}}
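A minimal consumption sketch for the record above (assuming the `Kevinger/hub-report-classlabels` repository is publicly loadable and its schema matches the metadata shown; the snippet is illustrative and not part of the original card):

```python
from datasets import load_dataset

# Load the train split described in the metadata above (assumed public repo).
ds = load_dataset("Kevinger/hub-report-classlabels", split="train")

# "label" is a ClassLabel feature; int2str maps the stored integer back to one
# of the eight topic names (business, crime, culture, entertainment, ...).
label_feature = ds.features["label"]
row = ds[0]
print(row["title"], "->", label_feature.int2str(row["label"]))
```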
2024-01-22T22:02:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for "hub-report-classlabels" More Information needed
[ "# Dataset Card for \"hub-report-classlabels\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"hub-report-classlabels\"\n\nMore Information needed" ]
389343f0e1d10ce5f878317609125380b74649fb
# Dataset Card for "cai-conversation-dev1705950597" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HuggingFaceH4/grok-conversation-harmless
[ "license:apache-2.0", "region:us" ]
2024-01-22T20:05:13+00:00
{"license": "apache-2.0", "dataset_info": {"features": [{"name": "init_prompt", "dtype": "string"}, {"name": "init_response", "dtype": "string"}, {"name": "critic_prompt", "dtype": "string"}, {"name": "critic_response", "dtype": "string"}, {"name": "revision_prompt", "dtype": "string"}, {"name": "revision_response", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "chosen", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "rejected", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train_sft", "num_bytes": 64692005, "num_examples": 21268}, {"name": "train_prefs", "num_bytes": 64737329, "num_examples": 21269}, {"name": "test_sft", "num_bytes": 3504807, "num_examples": 1156}, {"name": "test_prefs", "num_bytes": 3554117, "num_examples": 1156}], "download_size": 56903392, "dataset_size": 136488258}, "configs": [{"config_name": "default", "data_files": [{"split": "train_sft", "path": "data/train_sft-*"}, {"split": "train_prefs", "path": "data/train_prefs-*"}, {"split": "test_sft", "path": "data/test_sft-*"}, {"split": "test_prefs", "path": "data/test_prefs-*"}]}]}
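The metadata above describes CAI-style preference data: SFT and preference splits whose rows carry `chosen` and `rejected` chat transcripts alongside the critique/revision fields. A minimal sketch of reading one preference pair, assuming the `HuggingFaceH4/grok-conversation-harmless` repository is publicly loadable with exactly this schema (the snippet is illustrative, not part of the original card):

```python
from datasets import load_dataset

# Preference split named in the metadata above (assumed public repo, same schema).
prefs = load_dataset("HuggingFaceH4/grok-conversation-harmless", split="train_prefs")

row = prefs[0]
# "chosen" and "rejected" are lists of {"role", "content"} chat messages.
print(row["prompt"])
print("chosen  :", row["chosen"][-1]["content"][:200])
print("rejected:", row["rejected"][-1]["content"][:200])
```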
2024-02-02T04:11:22+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Dataset Card for "cai-conversation-dev1705950597" More Information needed
[ "# Dataset Card for \"cai-conversation-dev1705950597\"\n\nMore Information needed" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Dataset Card for \"cai-conversation-dev1705950597\"\n\nMore Information needed" ]
fc9175c81ee8537ff83293af5070d9adae2b2c07
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T04:48:23.031751](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2/blob/main/results_2024-01-23T04-48-23.031751.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6522942208634362, "acc_stderr": 0.03217101114207215, "acc_norm": 0.6517413263222946, "acc_norm_stderr": 0.03283932147996629, "mc1": 0.5973072215422277, "mc1_stderr": 0.01716883093518721, "mc2": 0.7214236695613149, "mc2_stderr": 0.01480035939963425 }, "harness|arc:challenge|25": { "acc": 0.71160409556314, "acc_stderr": 0.013238394422428171, "acc_norm": 0.7380546075085325, "acc_norm_stderr": 0.012849054826858108 }, "harness|hellaswag|10": { "acc": 0.7295359490141406, "acc_stderr": 0.004432917403755056, "acc_norm": 0.8884684325831508, "acc_norm_stderr": 0.0031414591751392704 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700914, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700914 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": {
"acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.025331202438944427, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.025331202438944427 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083522, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083522 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.03287666758603491, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.03287666758603491 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586818, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9119170984455959, "acc_stderr": 0.02045374660160103, "acc_norm": 0.9119170984455959, "acc_norm_stderr": 0.02045374660160103 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097112, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097112 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.02882088466625326, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.02882088466625326 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461763, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461763 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.02584501798692692, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.02584501798692692 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621115, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621115 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.038498560987940904, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.038498560987940904 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165612, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165612 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8301404853128991, "acc_stderr": 0.013428186370608306, "acc_norm": 0.8301404853128991, "acc_norm_stderr": 0.013428186370608306 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.02378620325550829, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.02378620325550829 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4324022346368715, "acc_stderr": 0.01656897123354861, "acc_norm": 0.4324022346368715, "acc_norm_stderr": 0.01656897123354861 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826524, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826524 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 
}, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47196870925684486, "acc_stderr": 0.012750151802922435, "acc_norm": 0.47196870925684486, "acc_norm_stderr": 0.012750151802922435 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389845, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389845 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.018975427920507208, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.018975427920507208 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.0289205832206756, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.0289205832206756 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5973072215422277, "mc1_stderr": 0.01716883093518721, "mc2": 0.7214236695613149, "mc2_stderr": 0.01480035939963425 }, "harness|winogrande|5": { "acc": 0.8326756116811366, "acc_stderr": 0.010490608806828075 }, "harness|gsm8k|5": { "acc": 0.6899166034874905, "acc_stderr": 0.012740305717376268 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
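Beyond the per-task loading example shown in the card, the card also mentions an aggregated "results" configuration whose "train" split always points at the latest run. A minimal sketch of pulling those aggregated metrics, assuming that configuration is exposed the same way as the per-task ones (the snippet is illustrative and not part of the auto-generated card):

```python
from datasets import load_dataset

# Aggregated metrics for the latest run, per the card's description of the
# "results" configuration; "train" is documented as pointing to the latest results.
results = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2",
    "results",
    split="train",
)
print(results.column_names)  # inspect which aggregated fields are present
print(results[0])
```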
open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2
[ "region:us" ]
2024-01-22T20:24:50+00:00
{"pretty_name": "Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2", "dataset_summary": "Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T04:48:23.031751](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2/blob/main/results_2024-01-23T04-48-23.031751.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6522942208634362,\n \"acc_stderr\": 0.03217101114207215,\n \"acc_norm\": 0.6517413263222946,\n \"acc_norm_stderr\": 0.03283932147996629,\n \"mc1\": 0.5973072215422277,\n \"mc1_stderr\": 0.01716883093518721,\n \"mc2\": 0.7214236695613149,\n \"mc2_stderr\": 0.01480035939963425\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.71160409556314,\n \"acc_stderr\": 0.013238394422428171,\n \"acc_norm\": 0.7380546075085325,\n \"acc_norm_stderr\": 0.012849054826858108\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7295359490141406,\n \"acc_stderr\": 0.004432917403755056,\n \"acc_norm\": 0.8884684325831508,\n \"acc_norm_stderr\": 0.0031414591751392704\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700914,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700914\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 
0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944427,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944427\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083522,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083522\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.03287666758603491,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.03287666758603491\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9119170984455959,\n \"acc_stderr\": 0.02045374660160103,\n \"acc_norm\": 0.9119170984455959,\n 
\"acc_norm_stderr\": 0.02045374660160103\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097112,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097112\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.02882088466625326,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.02882088466625326\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461763,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461763\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621115,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621115\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.038498560987940904,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.038498560987940904\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165612,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165612\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.8301404853128991,\n \"acc_stderr\": 0.013428186370608306,\n \"acc_norm\": 0.8301404853128991,\n \"acc_norm_stderr\": 0.013428186370608306\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4324022346368715,\n \"acc_stderr\": 0.01656897123354861,\n \"acc_norm\": 0.4324022346368715,\n \"acc_norm_stderr\": 0.01656897123354861\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826524,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826524\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47196870925684486,\n \"acc_stderr\": 0.012750151802922435,\n \"acc_norm\": 0.47196870925684486,\n \"acc_norm_stderr\": 0.012750151802922435\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.018975427920507208,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.018975427920507208\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.0289205832206756,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.0289205832206756\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5973072215422277,\n \"mc1_stderr\": 0.01716883093518721,\n \"mc2\": 0.7214236695613149,\n \"mc2_stderr\": 0.01480035939963425\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8326756116811366,\n \"acc_stderr\": 0.010490608806828075\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6899166034874905,\n \"acc_stderr\": 0.012740305717376268\n }\n}\n```", "repo_url": 
"https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|arc:challenge|25_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|gsm8k|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hellaswag|10_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-22-32.562493.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T20-22-32.562493.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-48-23.031751.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-48-23.031751.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-48-23.031751.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-48-23.031751.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-22-32.562493.parquet"]}, 
{"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["**/details_harness|winogrande|5_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": ["**/details_harness|winogrande|5_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T04-48-23.031751.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T20_22_32.562493", "path": ["results_2024-01-22T20-22-32.562493.parquet"]}, {"split": "2024_01_23T04_48_23.031751", "path": 
["results_2024-01-23T04-48-23.031751.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T04-48-23.031751.parquet"]}]}]}
2024-01-23T04:51:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2 Dataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T04:48:23.031751 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
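To load the details from one of the runs above, a minimal sketch is shown below. The repository name follows the naming pattern of this details dataset, and "harness_winogrande_5" and the "latest" split are taken from the configurations listed for it; any other listed configuration can be substituted.

```python
from datasets import load_dataset

# Load one evaluation configuration of this details dataset.
# "latest" is the split that points at the most recent run
# (the timestamped splits listed in the configurations also work).
data = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e2",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```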
[ "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:48:23.031751(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:48:23.031751(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
794c279a5b2ac7308718e3789c817a2c92ec0fbd
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T20:28:36.623759](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3/blob/main/results_2024-01-22T20-28-36.623759.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6056055075713813, "acc_stderr": 0.033207404628966465, "acc_norm": 0.6099747188919664, "acc_norm_stderr": 0.033882373852914933, "mc1": 0.5569155446756426, "mc1_stderr": 0.017389730346877113, "mc2": 0.7068986087180347, "mc2_stderr": 0.015043727229165492 }, "harness|arc:challenge|25": { "acc": 0.590443686006826, "acc_stderr": 0.014370358632472435, "acc_norm": 0.6254266211604096, "acc_norm_stderr": 0.014144193471893454 }, "harness|hellaswag|10": { "acc": 0.6748655646285601, "acc_stderr": 0.004674677287148619, "acc_norm": 0.853415654252141, "acc_norm_stderr": 0.0035296822858572425 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6875, "acc_stderr": 0.038760854559127644, "acc_norm": 0.6875, "acc_norm_stderr": 0.038760854559127644 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5":
{ "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5895953757225434, "acc_stderr": 0.03750757044895536, "acc_norm": 0.5895953757225434, "acc_norm_stderr": 0.03750757044895536 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.03265019475033582, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.03265019475033582 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419035, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419035 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.38095238095238093, "acc_stderr": 0.025010749116137602, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.025010749116137602 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.603225806451613, "acc_stderr": 0.027831231605767944, "acc_norm": 0.603225806451613, "acc_norm_stderr": 0.027831231605767944 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7454545454545455, "acc_stderr": 0.03401506715249039, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.03095405547036589, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.03095405547036589 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.02578772318072387, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.02578772318072387 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5666666666666667, "acc_stderr": 0.025124653525885117, "acc_norm": 0.5666666666666667, "acc_norm_stderr": 0.025124653525885117 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228395, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228395 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6638655462184874, "acc_stderr": 0.030684737115135367, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.030684737115135367 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.0386155754625517, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.0386155754625517 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7908256880733945, "acc_stderr": 0.017437937173343233, "acc_norm": 0.7908256880733945, "acc_norm_stderr": 0.017437937173343233 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7843137254901961, "acc_stderr": 0.028867431449849316, "acc_norm": 0.7843137254901961, "acc_norm_stderr": 0.028867431449849316 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416827, "acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416827 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.038808483010823944, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260594, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260594 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597552, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597552 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.776500638569604, "acc_stderr": 0.01489723522945071, "acc_norm": 0.776500638569604, "acc_norm_stderr": 0.01489723522945071 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153193, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153193 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.30614525139664805, "acc_stderr": 0.015414494487903227, "acc_norm": 0.30614525139664805, "acc_norm_stderr": 0.015414494487903227 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6797385620915033, "acc_stderr": 0.02671611838015685, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.02671611838015685 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6975308641975309, "acc_stderr": 0.025557653981868045, 
"acc_norm": 0.6975308641975309, "acc_norm_stderr": 0.025557653981868045 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.029779450957303062, "acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.029779450957303062 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43546284224250326, "acc_stderr": 0.012663412101248333, "acc_norm": 0.43546284224250326, "acc_norm_stderr": 0.012663412101248333 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6176470588235294, "acc_stderr": 0.02952009569768776, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.02952009569768776 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6339869281045751, "acc_stderr": 0.019488025745529672, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.019488025745529672 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.029162738410249765, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.029162738410249765 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6865671641791045, "acc_stderr": 0.03280188205348643, "acc_norm": 0.6865671641791045, "acc_norm_stderr": 0.03280188205348643 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366255, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5569155446756426, "mc1_stderr": 0.017389730346877113, "mc2": 0.7068986087180347, "mc2_stderr": 0.015043727229165492 }, "harness|winogrande|5": { "acc": 0.7734806629834254, "acc_stderr": 0.011764149054698341 }, "harness|gsm8k|5": { "acc": 0.3934799090219864, "acc_stderr": 0.013456315828404581 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
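The aggregated metrics for this model can be read in the same way as the per-task details. The sketch below assumes the "results" configuration and the "latest" split naming described above; `Dataset.to_pandas()` is the standard `datasets` conversion to a DataFrame.

```python
from datasets import load_dataset

# Aggregated per-run metrics live in the "results" configuration; the
# "latest" split points at the most recent evaluation run (names as
# described in this card, so treat them as the card's convention).
results = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3",
    "results",
    split="latest",
)
df = results.to_pandas()  # one row per run; columns hold the aggregated scores
print(df.iloc[-1])
```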
open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3
[ "region:us" ]
2024-01-22T20:30:53+00:00
{"pretty_name": "Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "dataset_summary": "Dataset automatically created during the evaluation run of model [silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3](https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T20:28:36.623759](https://huggingface.co/datasets/open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3/blob/main/results_2024-01-22T20-28-36.623759.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6056055075713813,\n \"acc_stderr\": 0.033207404628966465,\n \"acc_norm\": 0.6099747188919664,\n \"acc_norm_stderr\": 0.033882373852914933,\n \"mc1\": 0.5569155446756426,\n \"mc1_stderr\": 0.017389730346877113,\n \"mc2\": 0.7068986087180347,\n \"mc2_stderr\": 0.015043727229165492\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.590443686006826,\n \"acc_stderr\": 0.014370358632472435,\n \"acc_norm\": 0.6254266211604096,\n \"acc_norm_stderr\": 0.014144193471893454\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6748655646285601,\n \"acc_stderr\": 0.004674677287148619,\n \"acc_norm\": 0.853415654252141,\n \"acc_norm_stderr\": 0.0035296822858572425\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.038760854559127644,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 
0.038760854559127644\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n \"acc_stderr\": 0.03750757044895536,\n \"acc_norm\": 0.5895953757225434,\n \"acc_norm_stderr\": 0.03750757044895536\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.43859649122807015,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.43859649122807015,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419035,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419035\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.025010749116137602,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.025010749116137602\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.603225806451613,\n \"acc_stderr\": 0.027831231605767944,\n \"acc_norm\": 0.603225806451613,\n \"acc_norm_stderr\": 0.027831231605767944\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7474747474747475,\n \"acc_stderr\": 0.03095405547036589,\n \"acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.03095405547036589\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.02578772318072387,\n \"acc_norm\": 
0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072387\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5666666666666667,\n \"acc_stderr\": 0.025124653525885117,\n \"acc_norm\": 0.5666666666666667,\n \"acc_norm_stderr\": 0.025124653525885117\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228395,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228395\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.030684737115135367,\n \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.030684737115135367\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.0386155754625517,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.0386155754625517\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7908256880733945,\n \"acc_stderr\": 0.017437937173343233,\n \"acc_norm\": 0.7908256880733945,\n \"acc_norm_stderr\": 0.017437937173343233\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7843137254901961,\n \"acc_stderr\": 0.028867431449849316,\n \"acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.028867431449849316\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n \"acc_stderr\": 0.03259625118416827,\n \"acc_norm\": 0.6188340807174888,\n \"acc_norm_stderr\": 0.03259625118416827\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260594,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260594\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597552,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597552\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.776500638569604,\n \"acc_stderr\": 0.01489723522945071,\n \"acc_norm\": 0.776500638569604,\n \"acc_norm_stderr\": 0.01489723522945071\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153193,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153193\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30614525139664805,\n \"acc_stderr\": 0.015414494487903227,\n \"acc_norm\": 0.30614525139664805,\n \"acc_norm_stderr\": 0.015414494487903227\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.02671611838015685,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.02671611838015685\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6975308641975309,\n \"acc_stderr\": 0.025557653981868045,\n \"acc_norm\": 0.6975308641975309,\n \"acc_norm_stderr\": 0.025557653981868045\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.029779450957303062,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.029779450957303062\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43546284224250326,\n \"acc_stderr\": 0.012663412101248333,\n \"acc_norm\": 0.43546284224250326,\n \"acc_norm_stderr\": 0.012663412101248333\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.02952009569768776,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.02952009569768776\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.019488025745529672,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.019488025745529672\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.029162738410249765,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.029162738410249765\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6865671641791045,\n \"acc_stderr\": 0.03280188205348643,\n \"acc_norm\": 0.6865671641791045,\n \"acc_norm_stderr\": 0.03280188205348643\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5569155446756426,\n \"mc1_stderr\": 0.017389730346877113,\n \"mc2\": 0.7068986087180347,\n \"mc2_stderr\": 0.015043727229165492\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7734806629834254,\n \"acc_stderr\": 0.011764149054698341\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3934799090219864,\n 
\"acc_stderr\": 0.013456315828404581\n }\n}\n```", "repo_url": "https://huggingface.co/silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|arc:challenge|25_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|gsm8k|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hellaswag|10_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-28-36.623759.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-28-36.623759.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-28-36.623759.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T20-28-36.623759.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-28-36.623759.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["**/details_harness|winogrande|5_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-22T20-28-36.623759.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T20_28_36.623759", "path": ["results_2024-01-22T20-28-36.623759.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T20-28-36.623759.parquet"]}]}]}
2024-01-22T20:31:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 Dataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T20:28:36.623759 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
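The card text above stops at "you can for instance do the following:" because the code block was stripped when this text field was processed. A minimal sketch of what that load typically looks like — the dataset path is an assumption based on the "details_<org>__<model>" naming convention used by these dumps, since the exact id is not shown in this record:

```python
from datasets import load_dataset

# Dataset path assumed from the "details_<org>__<model>" convention; it is
# not spelled out in this processed text field.
data = load_dataset(
    "open-llm-leaderboard/details_silvercoder67__Mistral-7b-instruct-v0.2-summ-sft-dpo-e3",
    "harness_winogrande_5",
    split="train",
)
```

Per the card, the "train" split of each configuration resolves to the latest timestamped run.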
[ "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T20:28:36.623759(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3\n\n\n\nDataset automatically created during the evaluation run of model silvercoder67/Mistral-7b-instruct-v0.2-summ-sft-dpo-e3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T20:28:36.623759(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2444ce97c2d8f1cb0a67e6609b88523f88bbc52a
# Dataset Card for Evaluation run of freeCS-dot-org/ThetaWave-7B-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [freeCS-dot-org/ThetaWave-7B-v0.1](https://huggingface.co/freeCS-dot-org/ThetaWave-7B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T20:31:20.560284](https://huggingface.co/datasets/open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1/blob/main/results_2024-01-22T20-31-20.560284.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6238961084914312, "acc_stderr": 0.03278163983950083, "acc_norm": 0.6262303758307073, "acc_norm_stderr": 0.033437997279802574, "mc1": 0.5520195838433293, "mc1_stderr": 0.01740851306342291, "mc2": 0.7167950682914775, "mc2_stderr": 0.014721015225011548 }, "harness|arc:challenge|25": { "acc": 0.6305460750853242, "acc_stderr": 0.014104578366491894, "acc_norm": 0.6808873720136519, "acc_norm_stderr": 0.013621696119173307 }, "harness|hellaswag|10": { "acc": 0.6763592909778928, "acc_stderr": 0.004669085411342192, "acc_norm": 0.8632742481577375, "acc_norm_stderr": 0.00342855459595022 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7171052631578947, "acc_stderr": 0.03665349695640767, "acc_norm": 0.7171052631578947, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7013888888888888, "acc_stderr": 0.03827052357950756, "acc_norm": 0.7013888888888888, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, 
"acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5446808510638298, "acc_stderr": 0.03255525359340355, "acc_norm": 0.5446808510638298, "acc_norm_stderr": 0.03255525359340355 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.046854730419077895, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.046854730419077895 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.025225450284067884, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.025225450284067884 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5580645161290323, "acc_stderr": 0.028251557906849734, "acc_norm": 0.5580645161290323, "acc_norm_stderr": 0.028251557906849734 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.035145285621750066, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.035145285621750066 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939098, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939098 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.03287666758603491, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.03287666758603491 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.02463978909770944, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.02463978909770944 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.617948717948718, "acc_stderr": 0.024635549163908234, "acc_norm": 0.617948717948718, "acc_norm_stderr": 0.024635549163908234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6638655462184874, "acc_stderr": 0.030684737115135356, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.030684737115135356 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, 
"acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8275229357798165, "acc_stderr": 0.016197807956848036, "acc_norm": 0.8275229357798165, "acc_norm_stderr": 0.016197807956848036 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538272, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591361, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.026361651668389094, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.026361651668389094 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455005, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455005 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.037276735755969126, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.037276735755969126 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.03487825168497892, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.019875655027867447, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.019875655027867447 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8122605363984674, "acc_stderr": 0.013964393769899136, "acc_norm": 0.8122605363984674, "acc_norm_stderr": 0.013964393769899136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069353, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069353 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.48044692737430167, "acc_stderr": 0.016709709877661995, "acc_norm": 0.48044692737430167, "acc_norm_stderr": 0.016709709877661995 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6862745098039216, "acc_stderr": 0.02656892101545715, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.02656892101545715 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7222222222222222, "acc_stderr": 0.024922001168886335, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.024922001168886335 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.475177304964539, "acc_stderr": 0.029790719243829727, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829727 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4634941329856584, "acc_stderr": 0.012736153390214963, "acc_norm": 0.4634941329856584, "acc_norm_stderr": 0.012736153390214963 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6360294117647058, "acc_stderr": 0.02922719246003203, "acc_norm": 0.6360294117647058, "acc_norm_stderr": 0.02922719246003203 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.01895088677080632, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.01895088677080632 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5422885572139303, "acc_stderr": 0.03522865864099598, "acc_norm": 0.5422885572139303, "acc_norm_stderr": 0.03522865864099598 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.027097290118070806, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.027097290118070806 }, "harness|truthfulqa:mc|0": { "mc1": 0.5520195838433293, "mc1_stderr": 0.01740851306342291, "mc2": 0.7167950682914775, "mc2_stderr": 0.014721015225011548 }, "harness|winogrande|5": { "acc": 0.7908445146014207, "acc_stderr": 0.01143045004588158 }, "harness|gsm8k|5": { "acc": 0.5564821834723275, "acc_stderr": 0.013684327592606165 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
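The card above notes that an aggregated "results" configuration stores the run-level metrics and that the newest run is always exposed through a dedicated split. A small sketch of pulling just those aggregates — the config name "results" and the "latest" split are assumptions carried over from the sibling record's metadata in this dump, not something this card states verbatim:

```python
from datasets import load_dataset

# "results" config and "latest" split assumed from the layout shown in the
# sibling record's metadata; the per-task configs work the same way.
results = load_dataset(
    "open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1",
    "results",
    split="latest",
)
print(results[0])  # first (and typically only) row: aggregated metrics for the newest run
```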
open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1
[ "region:us" ]
2024-01-22T20:33:38+00:00
{"pretty_name": "Evaluation run of freeCS-dot-org/ThetaWave-7B-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [freeCS-dot-org/ThetaWave-7B-v0.1](https://huggingface.co/freeCS-dot-org/ThetaWave-7B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T20:31:20.560284](https://huggingface.co/datasets/open-llm-leaderboard/details_freeCS-dot-org__ThetaWave-7B-v0.1/blob/main/results_2024-01-22T20-31-20.560284.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6238961084914312,\n \"acc_stderr\": 0.03278163983950083,\n \"acc_norm\": 0.6262303758307073,\n \"acc_norm_stderr\": 0.033437997279802574,\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.01740851306342291,\n \"mc2\": 0.7167950682914775,\n \"mc2_stderr\": 0.014721015225011548\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6305460750853242,\n \"acc_stderr\": 0.014104578366491894,\n \"acc_norm\": 0.6808873720136519,\n \"acc_norm_stderr\": 0.013621696119173307\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6763592909778928,\n \"acc_stderr\": 0.004669085411342192,\n \"acc_norm\": 0.8632742481577375,\n \"acc_norm_stderr\": 0.00342855459595022\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.7013888888888888,\n \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n 
\"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5446808510638298,\n \"acc_stderr\": 0.03255525359340355,\n \"acc_norm\": 0.5446808510638298,\n \"acc_norm_stderr\": 0.03255525359340355\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3994708994708995,\n \"acc_stderr\": 0.025225450284067884,\n \"acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.025225450284067884\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5580645161290323,\n \"acc_stderr\": 0.028251557906849734,\n \"acc_norm\": 0.5580645161290323,\n \"acc_norm_stderr\": 0.028251557906849734\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.035145285621750066,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.035145285621750066\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.03287666758603491,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.03287666758603491\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.02463978909770944,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.02463978909770944\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.617948717948718,\n \"acc_stderr\": 
0.024635549163908234,\n \"acc_norm\": 0.617948717948718,\n \"acc_norm_stderr\": 0.024635549163908234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.030684737115135356,\n \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.030684737115135356\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8275229357798165,\n \"acc_stderr\": 0.016197807956848036,\n \"acc_norm\": 0.8275229357798165,\n \"acc_norm_stderr\": 0.016197807956848036\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591361,\n \"acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591361\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.026361651668389094,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.026361651668389094\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455005,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455005\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.037276735755969126,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.037276735755969126\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.019875655027867447,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.019875655027867447\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n \"acc_stderr\": 0.013964393769899136,\n \"acc_norm\": 0.8122605363984674,\n 
\"acc_norm_stderr\": 0.013964393769899136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069353,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069353\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.48044692737430167,\n \"acc_stderr\": 0.016709709877661995,\n \"acc_norm\": 0.48044692737430167,\n \"acc_norm_stderr\": 0.016709709877661995\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.02656892101545715,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.02656892101545715\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4634941329856584,\n \"acc_stderr\": 0.012736153390214963,\n \"acc_norm\": 0.4634941329856584,\n \"acc_norm_stderr\": 0.012736153390214963\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6360294117647058,\n \"acc_stderr\": 0.02922719246003203,\n \"acc_norm\": 0.6360294117647058,\n \"acc_norm_stderr\": 0.02922719246003203\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.01895088677080632,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.01895088677080632\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5422885572139303,\n \"acc_stderr\": 0.03522865864099598,\n \"acc_norm\": 0.5422885572139303,\n \"acc_norm_stderr\": 0.03522865864099598\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070806,\n \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070806\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.01740851306342291,\n \"mc2\": 0.7167950682914775,\n \"mc2_stderr\": 0.014721015225011548\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7908445146014207,\n \"acc_stderr\": 0.01143045004588158\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5564821834723275,\n \"acc_stderr\": 0.013684327592606165\n }\n}\n```", "repo_url": "https://huggingface.co/freeCS-dot-org/ThetaWave-7B-v0.1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|arc:challenge|25_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|gsm8k|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hellaswag|10_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-31-20.560284.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-31-20.560284.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-31-20.560284.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T20-31-20.560284.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-31-20.560284.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T20-31-20.560284.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["**/details_harness|winogrande|5_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T20-31-20.560284.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T20_31_20.560284", "path": ["results_2024-01-22T20-31-20.560284.parquet"]}, {"split": "latest", "path": 
["results_2024-01-22T20-31-20.560284.parquet"]}]}]}
2024-01-22T20:34:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of freeCS-dot-org/ThetaWave-7B-v0.1 Dataset automatically created during the evaluation run of model freeCS-dot-org/ThetaWave-7B-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T20:31:20.560284(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of freeCS-dot-org/ThetaWave-7B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model freeCS-dot-org/ThetaWave-7B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T20:31:20.560284(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of freeCS-dot-org/ThetaWave-7B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model freeCS-dot-org/ThetaWave-7B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T20:31:20.560284(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
4a82986d1104b3647e31634d609d22114de5f089
This synthetic dataset was generated using the Open DataGen Python library. (https://github.com/thoddnn/open-datagen) # Methodology: 1) Retrieve random article content from the HuggingFace Wikipedia English dataset. 2) Construct a Chain of Thought (CoT) to generate a Multiple Choice Question (MCQ). 3) Utilize a Large Language Model (LLM) to score the results and then filter them. All these steps are prompted in the 'template.json' file located in the specified code folder. Code: https://github.com/thoddnn/open-datagen/blob/main/opendatagen/examples/opendata-eval/ Feel free to reach me on LinkedIn (https://www.linkedin.com/in/thomasdordonne/) or Twitter (https://twitter.com/thoDdnn)
thoddnn/OpenDataGen-factuality-en-v0.1
[ "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:mit", "wikipedia", "synthetic data", "region:us" ]
2024-01-22T20:56:37+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"], "task_categories": ["question-answering"], "tags": ["wikipedia", "synthetic data"]}
2024-01-23T11:56:00+00:00
[]
[ "en" ]
TAGS #task_categories-question-answering #size_categories-n<1K #language-English #license-mit #wikipedia #synthetic data #region-us
This synthetic dataset was generated using the Open DataGen Python library. (URL # Methodology: 1) Retrieve random article content from the HuggingFace Wikipedia English dataset. 2) Construct a Chain of Thought (CoT) to generate a Multiple Choice Question (MCQ). 3) Utilize a Large Language Model (LLM) to score the results and then filter them. All these steps are prompted in the 'URL' file located in the specified code folder. Code: URL Feel free to reach me on LinkedIn (URL or Twitter (URL
[ "# Methodology:\n\n1) Retrieve random article content from the HuggingFace Wikipedia English dataset.\n2) Construct a Chain of Thought (CoT) to generate a Multiple Choice Question (MCQ).\n3) Utilize a Large Language Model (LLM) to score the results then filter it.\n\nAll these steps are prompted in the 'URL' file located in the specified code folder.\n\nCode: URL\n\nFeel free to reach me on Linkedin (URL or Twitter (URL" ]
[ "TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #wikipedia #synthetic data #region-us \n", "# Methodology:\n\n1) Retrieve random article content from the HuggingFace Wikipedia English dataset.\n2) Construct a Chain of Thought (CoT) to generate a Multiple Choice Question (MCQ).\n3) Utilize a Large Language Model (LLM) to score the results then filter it.\n\nAll these steps are prompted in the 'URL' file located in the specified code folder.\n\nCode: URL\n\nFeel free to reach me on Linkedin (URL or Twitter (URL" ]
015100b15107dddc9ef492f7203021e4957d20e4
# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-7B-Merge-Slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [RatanRohith/NeuralPizza-7B-Merge-Slerp](https://huggingface.co/RatanRohith/NeuralPizza-7B-Merge-Slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T21:47:50.776941](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp/blob/main/results_2024-01-22T21-47-50.776941.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23196194129343728, "acc_stderr": 0.029934654752561563, "acc_norm": 0.2314240573187148, "acc_norm_stderr": 0.03071122006512167, "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.012240491536132861, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.012240491536132861 }, "harness|hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, 
"acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.4956590370955012, "acc_stderr": 0.014051956064076911 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp
[ "region:us" ]
2024-01-22T21:50:11+00:00
{"pretty_name": "Evaluation run of RatanRohith/NeuralPizza-7B-Merge-Slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [RatanRohith/NeuralPizza-7B-Merge-Slerp](https://huggingface.co/RatanRohith/NeuralPizza-7B-Merge-Slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T21:47:50.776941](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp/blob/main/results_2024-01-22T21-47-50.776941.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23196194129343728,\n \"acc_stderr\": 0.029934654752561563,\n \"acc_norm\": 0.2314240573187148,\n \"acc_norm_stderr\": 0.03071122006512167,\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22696245733788395,\n \"acc_stderr\": 0.012240491536132861,\n \"acc_norm\": 0.22696245733788395,\n \"acc_norm_stderr\": 0.012240491536132861\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2504481179047998,\n \"acc_stderr\": 0.004323856300539177,\n \"acc_norm\": 0.2504481179047998,\n \"acc_norm_stderr\": 0.004323856300539177\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 
0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 
0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4956590370955012,\n \"acc_stderr\": 0.014051956064076911\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/RatanRohith/NeuralPizza-7B-Merge-Slerp", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|arc:challenge|25_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|gsm8k|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hellaswag|10_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T21-47-50.776941.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T21-47-50.776941.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T21-47-50.776941.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T21-47-50.776941.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T21-47-50.776941.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T21-47-50.776941.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["**/details_harness|winogrande|5_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T21-47-50.776941.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T21_47_50.776941", "path": ["results_2024-01-22T21-47-50.776941.parquet"]}, {"split": "latest", "path": 
["results_2024-01-22T21-47-50.776941.parquet"]}]}]}
2024-01-22T21:50:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-7B-Merge-Slerp Dataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-7B-Merge-Slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T21:47:50.776941 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
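The "To load the details from a run, you can for instance do the following:" instruction above lost its code snippet when the card text was flattened; the sketch below restores it, using the repository id and the `harness_winogrande_5` config named in this card's metadata (it assumes the Hugging Face `datasets` library is installed):

```python
# Sketch of the loading snippet referenced above: pull the per-example details
# for one config of this evaluation-details dataset.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_RatanRohith__NeuralPizza-7B-Merge-Slerp",
    "harness_winogrande_5",
    split="train",  # "train" tracks the latest results; timestamped splits pin a specific run
)
print(data)
```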
[ "# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-7B-Merge-Slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-7B-Merge-Slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T21:47:50.776941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-7B-Merge-Slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-7B-Merge-Slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T21:47:50.776941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3e67e5e6c285f00a17ecd7efde817ab150a811f5
# Comma Car Segments public dataset of sanitized car segments for each car that [openpilot](https://github.com/commaai/openpilot) supports, containing only CAN data, pandaStates, and carParams. This allows you to perform validation across a large database when doing car ports and making changes for particular platforms.
commaai/commaCarSegments
[ "comma", "openpilot", "region:us" ]
2024-01-22T22:00:09+00:00
{"pretty_name": "Comma Car Segments", "tags": ["comma", "openpilot"]}
2024-01-27T07:16:09+00:00
[]
[]
TAGS #comma #openpilot #region-us
# Comma Car Segments public dataset of sanitized car segments for each car that openpilot supports, containing only CAN data, pandaStates, and carParams. This allows you to perform validation across a large database when doing car ports and making changes for particular platforms.
[ "# Comma Car Segments\n\npublic dataset of sanitized car segments for each car that openpilot supports, containing only CAN data, pandaStates, and carParams. This allows you to perform validation across a large database when doing car ports and making changes for particular platforms." ]
[ "TAGS\n#comma #openpilot #region-us \n", "# Comma Car Segments\n\npublic dataset of sanitized car segments for each car that openpilot supports, containing only CAN data, pandaStates, and carParams. This allows you to perform validation across a large database when doing car ports and making changes for particular platforms." ]
38e9d124366098d3a47c7ed7c0394b36d9789e2e
# Dataset Card for Evaluation run of ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000](https://huggingface.co/ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T22:10:51.590429](https://huggingface.co/datasets/open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000/blob/main/results_2024-01-22T22-10-51.590429.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5972023812976862, "acc_stderr": 0.033037912443727974, "acc_norm": 0.6035719763948437, "acc_norm_stderr": 0.033725121148129075, "mc1": 0.2778457772337821, "mc1_stderr": 0.015680929364024643, "mc2": 0.41480772935959465, "mc2_stderr": 0.01453565986280891 }, "harness|arc:challenge|25": { "acc": 0.5366894197952219, "acc_stderr": 0.014572000527756989, "acc_norm": 0.5725255972696246, "acc_norm_stderr": 0.014456862944650647 }, "harness|hellaswag|10": { "acc": 0.5994821748655647, "acc_stderr": 0.00489001935602109, "acc_norm": 0.8024297948615814, "acc_norm_stderr": 0.0039735233080143454 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6447368421052632, "acc_stderr": 0.03894734487013317, "acc_norm": 0.6447368421052632, "acc_norm_stderr": 0.03894734487013317 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6944444444444444, "acc_stderr": 0.03852084696008534, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.03852084696008534 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 
0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5319148936170213, "acc_stderr": 0.03261936918467382, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.03261936918467382 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.40350877192982454, "acc_stderr": 0.046151869625837026, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.046151869625837026 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.02519710107424649, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.02519710107424649 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7322580645161291, "acc_stderr": 0.025189006660212385, "acc_norm": 0.7322580645161291, "acc_norm_stderr": 0.025189006660212385 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145633, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885416, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885416 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198906, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198906 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.027493504244548057, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.027493504244548057 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5846153846153846, "acc_stderr": 0.02498535492310235, "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.02498535492310235 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6260504201680672, "acc_stderr": 0.03142946637883708, "acc_norm": 0.6260504201680672, "acc_norm_stderr": 0.03142946637883708 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7926605504587156, "acc_stderr": 0.017381415563608674, "acc_norm": 0.7926605504587156, "acc_norm_stderr": 0.017381415563608674 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.033723432716530645, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.033723432716530645 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7352941176470589, "acc_stderr": 0.030964517926923403, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.030964517926923403 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7341772151898734, "acc_stderr": 0.02875679962965834, "acc_norm": 0.7341772151898734, "acc_norm_stderr": 0.02875679962965834 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847834, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847834 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560403, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560403 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7841634738186463, "acc_stderr": 0.014711684386139956, "acc_norm": 0.7841634738186463, "acc_norm_stderr": 0.014711684386139956 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153176, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3854748603351955, "acc_stderr": 0.016277927039638193, "acc_norm": 0.3854748603351955, "acc_norm_stderr": 0.016277927039638193 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6764705882352942, "acc_stderr": 0.0267874531119065, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.0267874531119065 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.654320987654321, "acc_stderr": 0.026462487777001862, "acc_norm": 
0.654320987654321, "acc_norm_stderr": 0.026462487777001862 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.02952591430255856, "acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.02952591430255856 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41916558018252936, "acc_stderr": 0.012602244505788236, "acc_norm": 0.41916558018252936, "acc_norm_stderr": 0.012602244505788236 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5808823529411765, "acc_stderr": 0.029972807170464622, "acc_norm": 0.5808823529411765, "acc_norm_stderr": 0.029972807170464622 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6160130718954249, "acc_stderr": 0.01967580813528152, "acc_norm": 0.6160130718954249, "acc_norm_stderr": 0.01967580813528152 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6816326530612244, "acc_stderr": 0.029822533793982062, "acc_norm": 0.6816326530612244, "acc_norm_stderr": 0.029822533793982062 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.027403859410786862, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.027403859410786862 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036847, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036847 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.2778457772337821, "mc1_stderr": 0.015680929364024643, "mc2": 0.41480772935959465, "mc2_stderr": 0.01453565986280891 }, "harness|winogrande|5": { "acc": 0.7632202052091555, "acc_stderr": 0.01194759236520739 }, "harness|gsm8k|5": { "acc": 0.2941622441243366, "acc_stderr": 0.012551285331470156 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
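The card above already shows how to load an individual task configuration. As a companion, here is a short sketch for reading the aggregated metrics back out of the "results" configuration mentioned in the card; that this configuration exposes a "latest" split in the same way as the per-task configurations is an assumption based on the record's metadata conventions.

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; the "latest" split is
# assumed to point at the newest results file for this evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000",
    "results",
    split="latest",
)
print(results)  # inspect the available columns, e.g. the aggregated accuracies quoted above
```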
open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000
[ "region:us" ]
2024-01-22T22:13:12+00:00
{"pretty_name": "Evaluation run of ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000", "dataset_summary": "Dataset automatically created during the evaluation run of model [ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000](https://huggingface.co/ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T22:10:51.590429](https://huggingface.co/datasets/open-llm-leaderboard/details_ewqr2130__7B_ppo_phiRM_2GPU_3e-7step_4000/blob/main/results_2024-01-22T22-10-51.590429.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5972023812976862,\n \"acc_stderr\": 0.033037912443727974,\n \"acc_norm\": 0.6035719763948437,\n \"acc_norm_stderr\": 0.033725121148129075,\n \"mc1\": 0.2778457772337821,\n \"mc1_stderr\": 0.015680929364024643,\n \"mc2\": 0.41480772935959465,\n \"mc2_stderr\": 0.01453565986280891\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5366894197952219,\n \"acc_stderr\": 0.014572000527756989,\n \"acc_norm\": 0.5725255972696246,\n \"acc_norm_stderr\": 0.014456862944650647\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5994821748655647,\n \"acc_stderr\": 0.00489001935602109,\n \"acc_norm\": 0.8024297948615814,\n \"acc_norm_stderr\": 0.0039735233080143454\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6447368421052632,\n \"acc_stderr\": 0.03894734487013317,\n \"acc_norm\": 0.6447368421052632,\n \"acc_norm_stderr\": 0.03894734487013317\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.03852084696008534\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467382,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467382\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.40350877192982454,\n \"acc_stderr\": 0.046151869625837026,\n \"acc_norm\": 0.40350877192982454,\n \"acc_norm_stderr\": 0.046151869625837026\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.02519710107424649,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.02519710107424649\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7322580645161291,\n \"acc_stderr\": 0.025189006660212385,\n \"acc_norm\": 0.7322580645161291,\n \"acc_norm_stderr\": 0.025189006660212385\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885416,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885416\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.030313710538198906,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.030313710538198906\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548057,\n \"acc_norm\": 0.8238341968911918,\n 
\"acc_norm_stderr\": 0.027493504244548057\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5846153846153846,\n \"acc_stderr\": 0.02498535492310235,\n \"acc_norm\": 0.5846153846153846,\n \"acc_norm_stderr\": 0.02498535492310235\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6260504201680672,\n \"acc_stderr\": 0.03142946637883708,\n \"acc_norm\": 0.6260504201680672,\n \"acc_norm_stderr\": 0.03142946637883708\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7926605504587156,\n \"acc_stderr\": 0.017381415563608674,\n \"acc_norm\": 0.7926605504587156,\n \"acc_norm_stderr\": 0.017381415563608674\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.033723432716530645,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.033723432716530645\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.030964517926923403,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.030964517926923403\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7341772151898734,\n \"acc_stderr\": 0.02875679962965834,\n \"acc_norm\": 0.7341772151898734,\n \"acc_norm_stderr\": 0.02875679962965834\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847834,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847834\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.023902325549560403,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.023902325549560403\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7841634738186463,\n \"acc_stderr\": 0.014711684386139956,\n \"acc_norm\": 0.7841634738186463,\n \"acc_norm_stderr\": 0.014711684386139956\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153176,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3854748603351955,\n \"acc_stderr\": 0.016277927039638193,\n \"acc_norm\": 0.3854748603351955,\n \"acc_norm_stderr\": 0.016277927039638193\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.0267874531119065,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.0267874531119065\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.654320987654321,\n \"acc_stderr\": 0.026462487777001862,\n \"acc_norm\": 0.654320987654321,\n \"acc_norm_stderr\": 0.026462487777001862\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.42907801418439717,\n \"acc_stderr\": 0.02952591430255856,\n \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.02952591430255856\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41916558018252936,\n \"acc_stderr\": 0.012602244505788236,\n \"acc_norm\": 0.41916558018252936,\n \"acc_norm_stderr\": 0.012602244505788236\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5808823529411765,\n \"acc_stderr\": 0.029972807170464622,\n \"acc_norm\": 0.5808823529411765,\n \"acc_norm_stderr\": 0.029972807170464622\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6160130718954249,\n \"acc_stderr\": 0.01967580813528152,\n \"acc_norm\": 0.6160130718954249,\n \"acc_norm_stderr\": 0.01967580813528152\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6816326530612244,\n \"acc_stderr\": 0.029822533793982062,\n \"acc_norm\": 0.6816326530612244,\n \"acc_norm_stderr\": 0.029822533793982062\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786862,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786862\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036847,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036847\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2778457772337821,\n \"mc1_stderr\": 0.015680929364024643,\n \"mc2\": 0.41480772935959465,\n \"mc2_stderr\": 0.01453565986280891\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7632202052091555,\n \"acc_stderr\": 0.01194759236520739\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2941622441243366,\n \"acc_stderr\": 
0.012551285331470156\n }\n}\n```", "repo_url": "https://huggingface.co/ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|arc:challenge|25_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|gsm8k|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hellaswag|10_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T22-10-51.590429.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T22-10-51.590429.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T22-10-51.590429.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T22-10-51.590429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T22-10-51.590429.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T22_10_51.590429", "path": ["**/details_harness|winogrande|5_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T22-10-51.590429.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_22T22_10_51.590429", "path": ["results_2024-01-22T22-10-51.590429.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T22-10-51.590429.parquet"]}]}]}
2024-01-22T22:13:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000 Dataset automatically created during the evaluation run of model ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T22:10:51.590429 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000\n\n\n\nDataset automatically created during the evaluation run of model ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T22:10:51.590429(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000\n\n\n\nDataset automatically created during the evaluation run of model ewqr2130/7B_ppo_phiRM_2GPU_3e-7step_4000 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T22:10:51.590429(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1d422e83d2860e22a5a3bc3e4e7d25e88afed030
# Dataset Card for "glaive-function-calling-v2-sharegpt" This dataset takes the [glaive/glaive-function-calling-v2](https://huggingface.co/datasets/glaiveai/glaive-function-calling-v2) dataset and formats it with ShareGPT using [Lilac](https://lilacml.com/) The accompanying notebook can be found [here](https://github.com/lilacai/lilac/blob/main/notebooks/GlaiveToShareGPT.ipynb). The original columns "system" and "chat" still exist on the dataset. There are 4 types of roles in the ShareGPT format: - system - user - human - function call The original dataset has a column called 'chat' with the following structure: ``` USER: Hi, I need help with calculating a tip. My bill is $50 and I want to leave a 20% tip. ASSISTANT: Sure, let me calculate that for you. <|endoftext|> ASSISTANT: <functioncall> {"name": "calculate_tip", "arguments": '{"bill_amount": 50, "tip_percentage": 20}'} <|endoftext|> FUNCTION RESPONSE: {"tip_amount": 10} ASSISTANT: Based on the bill amount and the tip percentage you provided, the tip you should leave is $10. <|endoftext|> ``` This dataset is a version that converts it into the ShareGPT format: ``` conversations: [{ 'from': "user", 'value': 'Hi, I need help with calculating a tip. My bill is $50 and I want to leave a 20% tip.' }, { 'from': 'assistant', 'value': 'Sure, let me calculate that for you. <|endoftext|>' }, { 'from': 'assistant', 'value': '<functioncall> {"name": "calculate_tip", "arguments": '{"bill_amount": 50, "tip_percentage": 20}'} <|endoftext|>' }, { 'from': 'assistant', 'value': '<functioncall> {"name": "calculate_tip", "arguments": '{"bill_amount": 50, "tip_percentage": 20}'} <|endoftext|>' }, { 'from': 'function response', 'value': '{"tip_amount": 10}', }, { 'from': assistant', 'value': 'Based on the bill amount and the tip percentage you provided, the tip you should leave is $10. <|endoftext|>' }] ``` [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
lilacai/glaive-function-calling-v2-sharegpt
[ "region:us" ]
2024-01-22T22:32:20+00:00
{"dataset_info": {"features": [{"name": "chat", "dtype": "string"}, {"name": "system", "dtype": "string"}, {"name": "__hfsplit__", "dtype": "string"}, {"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 509687903, "num_examples": 112960}], "download_size": 196572451, "dataset_size": 509687903}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-29T23:18:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "glaive-function-calling-v2-sharegpt" This dataset takes the glaive/glaive-function-calling-v2 dataset and formats it with ShareGPT using Lilac The accompanying notebook can be found here. The original columns "system" and "chat" still exist on the dataset. There are 4 types of roles in the ShareGPT format: - system - user - human - function call The original dataset has a column called 'chat' with the following structure: This dataset is a version that converts it into the ShareGPT format: More Information needed
[ "# Dataset Card for \"glaive-function-calling-v2-sharegpt\"\n\nThis dataset takes the glaive/glaive-function-calling-v2 dataset and formats it with ShareGPT using Lilac\n\nThe accompanying notebook can be found here.\n\nThe original columns \"system\" and \"chat\" still exist on the dataset.\n\nThere are 4 types of roles in the ShareGPT format:\n- system\n- user\n- human\n- function call\n\nThe original dataset has a column called 'chat' with the following structure:\n\n\nThis dataset is a version that converts it into the ShareGPT format:\n\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"glaive-function-calling-v2-sharegpt\"\n\nThis dataset takes the glaive/glaive-function-calling-v2 dataset and formats it with ShareGPT using Lilac\n\nThe accompanying notebook can be found here.\n\nThe original columns \"system\" and \"chat\" still exist on the dataset.\n\nThere are 4 types of roles in the ShareGPT format:\n- system\n- user\n- human\n- function call\n\nThe original dataset has a column called 'chat' with the following structure:\n\n\nThis dataset is a version that converts it into the ShareGPT format:\n\n\nMore Information needed" ]
c90b8d5976bfa3ba23281a4f1d96b927e84797d5
# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-Valor-7B-Merge-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [RatanRohith/NeuralPizza-Valor-7B-Merge-slerp](https://huggingface.co/RatanRohith/NeuralPizza-Valor-7B-Merge-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T23:00:14.356889](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp/blob/main/results_2024-01-22T23-00-14.356889.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23196194129343728, "acc_stderr": 0.029934654752561563, "acc_norm": 0.2314240573187148, "acc_norm_stderr": 0.03071122006512167, "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.012240491536132861, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.012240491536132861 }, "harness|hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, 
"acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, 
"acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.4956590370955012, "acc_stderr": 0.014051956064076911 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
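The card above already shows how to load a single task's details; for the aggregated metrics it mentions, a minimal sketch (assuming the "results" configuration and "latest" split follow the same layout as the per-task configurations listed for this repository) could be:

```python
from datasets import load_dataset

# Sketch: read the aggregated-results configuration for this evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp",
    "results",
    split="latest",
)
print(results.column_names)   # aggregated metric columns for the run
print(results[0])             # the single row of aggregated results
```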
open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp
[ "region:us" ]
2024-01-22T23:02:34+00:00
{"pretty_name": "Evaluation run of RatanRohith/NeuralPizza-Valor-7B-Merge-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [RatanRohith/NeuralPizza-Valor-7B-Merge-slerp](https://huggingface.co/RatanRohith/NeuralPizza-Valor-7B-Merge-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T23:00:14.356889](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__NeuralPizza-Valor-7B-Merge-slerp/blob/main/results_2024-01-22T23-00-14.356889.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23196194129343728,\n \"acc_stderr\": 0.029934654752561563,\n \"acc_norm\": 0.2314240573187148,\n \"acc_norm_stderr\": 0.03071122006512167,\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22696245733788395,\n \"acc_stderr\": 0.012240491536132861,\n \"acc_norm\": 0.22696245733788395,\n \"acc_norm_stderr\": 0.012240491536132861\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2504481179047998,\n \"acc_stderr\": 0.004323856300539177,\n \"acc_norm\": 0.2504481179047998,\n \"acc_norm_stderr\": 0.004323856300539177\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 
0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4956590370955012,\n \"acc_stderr\": 0.014051956064076911\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/RatanRohith/NeuralPizza-Valor-7B-Merge-slerp", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|arc:challenge|25_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|gsm8k|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hellaswag|10_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-00-14.356889.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-00-14.356889.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-00-14.356889.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T23-00-14.356889.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-00-14.356889.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-00-14.356889.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["**/details_harness|winogrande|5_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T23-00-14.356889.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T23_00_14.356889", "path": ["results_2024-01-22T23-00-14.356889.parquet"]}, {"split": "latest", "path": 
["results_2024-01-22T23-00-14.356889.parquet"]}]}]}
2024-01-22T23:02:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-Valor-7B-Merge-slerp Dataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-Valor-7B-Merge-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T23:00:14.356889 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-Valor-7B-Merge-slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-Valor-7B-Merge-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T23:00:14.356889(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of RatanRohith/NeuralPizza-Valor-7B-Merge-slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/NeuralPizza-Valor-7B-Merge-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T23:00:14.356889(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
ffc8d3beccd557e7ef9d0605fa669d794f9ae409
Like [MiniCoT](https://huggingface.co/datasets/euclaise/MiniCoT), but even smaller; it consists of strategyqa, TAL-SCQ5K, quartz, GSM8K, FLUTE, and MATH.
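A minimal usage sketch (not part of the original card): the snippet below loads TinyCoT with the Hugging Face `datasets` library and reads one row; the column names (`prompt`, `rationale`, `target`, `source`) and the `train` split are taken from the dataset info recorded further down in this record.

```python
from datasets import load_dataset

# Load the TinyCoT training split from the Hugging Face Hub.
tinycot = load_dataset("euclaise/TinyCoT", split="train")

# Each row holds a prompt, a short chain-of-thought rationale, the final
# target answer, and the name of the source dataset the example came from.
row = tinycot[0]
print(row["prompt"])
print(row["rationale"])
print(row["target"])
print(row["source"])
```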
euclaise/TinyCoT
[ "region:us" ]
2024-01-22T23:06:31+00:00
{"dataset_info": {"features": [{"name": "rationale", "dtype": "string"}, {"name": "target", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 12165121, "num_examples": 27676}], "download_size": 7265286, "dataset_size": 12165121}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-23T02:09:29+00:00
[]
[]
TAGS #region-us
Like MiniCoT, but even smaller; it consists of strategyqa, TAL-SCQ5K, quartz, GSM8K, FLUTE, and MATH.
[]
[ "TAGS\n#region-us \n" ]
8f71c88aa99dfa47241caf9de803fd508ee21bf5
This dataset was made by scraping the Dominican subreddit r/Dominicanos. It is an Excel file with 3 columns: title, body, and comments. The /// symbol separates individual comments. The objective of this dataset is to transform it into useful text for fine-tuning, such as dividing the text into chunks and generating question-and-answer pairs.
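A rough preprocessing sketch of what the description points at: reading the spreadsheet, splitting the comment column on the /// separator, and chunking the text. The file name, column names, and chunk size are assumptions for illustration only; the card does not specify them.

```python
import pandas as pd

# Hypothetical file name; the card only says the data is an Excel sheet
# with title, body, and comments columns (actual column names may differ).
df = pd.read_excel("dominican_reddit.xlsx")

chunks = []
for _, row in df.iterrows():
    # Individual comments are joined with "///" according to the card.
    comments = [c.strip() for c in str(row["comments"]).split("///") if c.strip()]
    text = "\n".join([str(row["title"]), str(row["body"])] + comments)
    # Naive fixed-size chunking; 1000 characters is an arbitrary choice.
    size = 1000
    chunks.extend(text[i:i + size] for i in range(0, len(text), size))

print(f"{len(chunks)} text chunks ready for question-answer generation")
```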
Arconte/Dominican_reddit_raw_corpus
[ "language:es", "license:mit", "region:us" ]
2024-01-22T23:11:28+00:00
{"language": ["es"], "license": "mit"}
2024-01-24T16:40:58+00:00
[]
[ "es" ]
TAGS #language-Spanish #license-mit #region-us
This dataset was made by scraping the Dominican subreddit r/Dominicanos. It is an Excel file with 3 columns: title, body, and comments. The /// symbol separates individual comments. The objective of this dataset is to transform it into useful text for fine-tuning, such as dividing the text into chunks and generating question-and-answer pairs.
[]
[ "TAGS\n#language-Spanish #license-mit #region-us \n" ]
db433a5cc5db697a15799a191a99715c03246693
# lilac/glaive-function-calling-v2 This dataset is a [Lilac](http://lilacml.com)-processed dataset. Original dataset: [https://huggingface.co/datasets/lilacai/glaive-function-calling-v2-sharegpt](https://huggingface.co/datasets/lilacai/glaive-function-calling-v2-sharegpt) To download the dataset to a local directory: ```bash lilac download lilacai/lilac-glaive-function-calling-v2 ``` or from Python with: ```py import lilac as ll ll.download("lilacai/lilac-glaive-function-calling-v2") ```
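If you only need the raw conversations rather than the Lilac-enriched copy, the original dataset linked above can also be pulled straight from the Hub with the `datasets` library; a small illustrative snippet (split names are not listed in this record, so none is assumed):

```python
from datasets import load_dataset

# Load the original, unprocessed ShareGPT-format dataset linked in the card.
source = load_dataset("lilacai/glaive-function-calling-v2-sharegpt")
print(source)  # shows the available splits and row counts
```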
lilacai/lilac-glaive-function-calling-v2
[ "Lilac", "region:us" ]
2024-01-22T23:17:37+00:00
{"tags": ["Lilac"]}
2024-01-23T16:48:55+00:00
[]
[]
TAGS #Lilac #region-us
# lilac/glaive-function-calling-v2 This dataset is a Lilac-processed dataset. Original dataset: URL To download the dataset to a local directory: or from Python with:
[ "# lilac/glaive-function-calling-v2\nThis dataset is a Lilac processed dataset. Original dataset: URL\n\nTo download the dataset to a local directory:\n\n\n\nor from python with:" ]
[ "TAGS\n#Lilac #region-us \n", "# lilac/glaive-function-calling-v2\nThis dataset is a Lilac processed dataset. Original dataset: URL\n\nTo download the dataset to a local directory:\n\n\n\nor from python with:" ]
791036e70f6982172304b979c8d9bb922552e716
# Dataset Card for Evaluation run of alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA](https://huggingface.co/alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_alexredna__Tukan-1.1B-Chat-reasoning-sft-COLA", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-22T23:29:11.286981](https://huggingface.co/datasets/open-llm-leaderboard/details_alexredna__Tukan-1.1B-Chat-reasoning-sft-COLA/blob/main/results_2024-01-22T23-29-11.286981.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2548927738380926, "acc_stderr": 0.03072430220180239, "acc_norm": 0.25609104148058187, "acc_norm_stderr": 0.03148388351888373, "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.3825474897236823, "mc2_stderr": 0.013853773787804245 }, "harness|arc:challenge|25": { "acc": 0.30887372013651876, "acc_stderr": 0.013501770929344004, "acc_norm": 0.3412969283276451, "acc_norm_stderr": 0.01385583128749772 }, "harness|hellaswag|10": { "acc": 0.4479187412865963, "acc_stderr": 0.004962638446395995, "acc_norm": 0.5977892850029874, "acc_norm_stderr": 0.004893418929918262 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.17777777777777778, "acc_stderr": 0.033027898599017176, "acc_norm": 0.17777777777777778, "acc_norm_stderr": 0.033027898599017176 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.03197565821032499, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.03197565821032499 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.24150943396226415, "acc_stderr": 0.026341480371118362, "acc_norm": 0.24150943396226415, "acc_norm_stderr": 0.026341480371118362 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.0358687928008034, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.24, 
"acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.23829787234042554, "acc_stderr": 0.02785125297388979, "acc_norm": 0.23829787234042554, "acc_norm_stderr": 0.02785125297388979 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727771, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727771 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2328042328042328, "acc_stderr": 0.02176596167215453, "acc_norm": 0.2328042328042328, "acc_norm_stderr": 0.02176596167215453 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03670066451047181, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047181 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1935483870967742, "acc_stderr": 0.022475258525536057, "acc_norm": 0.1935483870967742, "acc_norm_stderr": 0.022475258525536057 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21182266009852216, "acc_stderr": 0.02874898368994106, "acc_norm": 0.21182266009852216, "acc_norm_stderr": 0.02874898368994106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23636363636363636, "acc_stderr": 0.03317505930009179, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.03317505930009179 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.20202020202020202, "acc_stderr": 0.02860620428922988, "acc_norm": 0.20202020202020202, "acc_norm_stderr": 0.02860620428922988 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24870466321243523, "acc_stderr": 0.03119584087770031, "acc_norm": 0.24870466321243523, "acc_norm_stderr": 0.03119584087770031 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.25384615384615383, "acc_stderr": 0.022066054378726257, "acc_norm": 0.25384615384615383, "acc_norm_stderr": 0.022066054378726257 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844086, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844086 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.026653531596715494, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 
0.026653531596715494 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436777, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436777 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23486238532110093, "acc_stderr": 0.018175110510343574, "acc_norm": 0.23486238532110093, "acc_norm_stderr": 0.018175110510343574 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.029771775228145628, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.029771775228145628 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.31223628691983124, "acc_stderr": 0.03016513786784701, "acc_norm": 0.31223628691983124, "acc_norm_stderr": 0.03016513786784701 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.03880848301082396, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082396 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252626, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.03259177392742177, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.03259177392742177 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.042466243366976256, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.042466243366976256 }, "harness|hendrycksTest-management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384493, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384493 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.029343114798094476, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.029343114798094476 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.27330779054916987, "acc_stderr": 0.01593668106262856, "acc_norm": 0.27330779054916987, "acc_norm_stderr": 0.01593668106262856 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2543352601156069, "acc_stderr": 0.023445826276545546, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.023445826276545546 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.01428834380392531, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.01428834380392531 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2222222222222222, "acc_stderr": 0.023805186524888146, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.023805186524888146 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.02521804037341062, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25617283950617287, 
"acc_stderr": 0.0242885336377261, "acc_norm": 0.25617283950617287, "acc_norm_stderr": 0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.21631205673758866, "acc_stderr": 0.024561720560562803, "acc_norm": 0.21631205673758866, "acc_norm_stderr": 0.024561720560562803 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2405475880052151, "acc_stderr": 0.010916406735478949, "acc_norm": 0.2405475880052151, "acc_norm_stderr": 0.010916406735478949 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.23529411764705882, "acc_stderr": 0.025767252010855966, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.025767252010855966 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2647058823529412, "acc_stderr": 0.01784808957491323, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.01784808957491323 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2636363636363636, "acc_stderr": 0.04220224692971987, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.16326530612244897, "acc_stderr": 0.02366169917709862, "acc_norm": 0.16326530612244897, "acc_norm_stderr": 0.02366169917709862 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409224, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409224 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.3433734939759036, "acc_stderr": 0.03696584317010601, "acc_norm": 0.3433734939759036, "acc_norm_stderr": 0.03696584317010601 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.034678266857038266, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.034678266857038266 }, "harness|truthfulqa:mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.3825474897236823, "mc2_stderr": 0.013853773787804245 }, "harness|winogrande|5": { "acc": 0.6077348066298343, "acc_stderr": 0.013722400462000885 }, "harness|gsm8k|5": { "acc": 0.013646702047005308, "acc_stderr": 0.003195747075480784 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_alexredna__Tukan-1.1B-Chat-reasoning-sft-COLA
[ "region:us" ]
2024-01-22T23:31:00+00:00
{"pretty_name": "Evaluation run of alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA", "dataset_summary": "Dataset automatically created during the evaluation run of model [alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA](https://huggingface.co/alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_alexredna__Tukan-1.1B-Chat-reasoning-sft-COLA\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T23:29:11.286981](https://huggingface.co/datasets/open-llm-leaderboard/details_alexredna__Tukan-1.1B-Chat-reasoning-sft-COLA/blob/main/results_2024-01-22T23-29-11.286981.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2548927738380926,\n \"acc_stderr\": 0.03072430220180239,\n \"acc_norm\": 0.25609104148058187,\n \"acc_norm_stderr\": 0.03148388351888373,\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.3825474897236823,\n \"mc2_stderr\": 0.013853773787804245\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.30887372013651876,\n \"acc_stderr\": 0.013501770929344004,\n \"acc_norm\": 0.3412969283276451,\n \"acc_norm_stderr\": 0.01385583128749772\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4479187412865963,\n \"acc_stderr\": 0.004962638446395995,\n \"acc_norm\": 0.5977892850029874,\n \"acc_norm_stderr\": 0.004893418929918262\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.17777777777777778,\n \"acc_stderr\": 0.033027898599017176,\n \"acc_norm\": 0.17777777777777778,\n \"acc_norm_stderr\": 0.033027898599017176\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.03197565821032499,\n \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.03197565821032499\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.24150943396226415,\n \"acc_stderr\": 0.026341480371118362,\n \"acc_norm\": 0.24150943396226415,\n \"acc_norm_stderr\": 0.026341480371118362\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 
0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.23829787234042554,\n \"acc_stderr\": 0.02785125297388979,\n \"acc_norm\": 0.23829787234042554,\n \"acc_norm_stderr\": 0.02785125297388979\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.03600105692727771,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.03600105692727771\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2328042328042328,\n \"acc_stderr\": 0.02176596167215453,\n \"acc_norm\": 0.2328042328042328,\n \"acc_norm_stderr\": 0.02176596167215453\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.03670066451047181,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.03670066451047181\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1935483870967742,\n \"acc_stderr\": 0.022475258525536057,\n \"acc_norm\": 0.1935483870967742,\n \"acc_norm_stderr\": 0.022475258525536057\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.21182266009852216,\n \"acc_stderr\": 0.02874898368994106,\n \"acc_norm\": 0.21182266009852216,\n \"acc_norm_stderr\": 0.02874898368994106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.03317505930009179,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.03317505930009179\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.20202020202020202,\n \"acc_stderr\": 0.02860620428922988,\n \"acc_norm\": 0.20202020202020202,\n \"acc_norm_stderr\": 0.02860620428922988\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24870466321243523,\n \"acc_stderr\": 0.03119584087770031,\n \"acc_norm\": 
0.24870466321243523,\n \"acc_norm_stderr\": 0.03119584087770031\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.25384615384615383,\n \"acc_stderr\": 0.022066054378726257,\n \"acc_norm\": 0.25384615384615383,\n \"acc_norm_stderr\": 0.022066054378726257\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25555555555555554,\n \"acc_stderr\": 0.026593939101844086,\n \"acc_norm\": 0.25555555555555554,\n \"acc_norm_stderr\": 0.026593939101844086\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.026653531596715494,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.026653531596715494\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436777,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436777\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23486238532110093,\n \"acc_stderr\": 0.018175110510343574,\n \"acc_norm\": 0.23486238532110093,\n \"acc_norm_stderr\": 0.018175110510343574\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.38425925925925924,\n \"acc_stderr\": 0.03317354514310742,\n \"acc_norm\": 0.38425925925925924,\n \"acc_norm_stderr\": 0.03317354514310742\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.029771775228145628,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.029771775228145628\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.31223628691983124,\n \"acc_stderr\": 0.03016513786784701,\n \"acc_norm\": 0.31223628691983124,\n \"acc_norm_stderr\": 0.03016513786784701\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.34977578475336324,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.34977578475336324,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.26717557251908397,\n \"acc_stderr\": 0.03880848301082396,\n \"acc_norm\": 0.26717557251908397,\n \"acc_norm_stderr\": 0.03880848301082396\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.03259177392742177,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.03259177392742177\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n \"acc_stderr\": 0.042466243366976256,\n \"acc_norm\": 0.2767857142857143,\n \"acc_norm_stderr\": 0.042466243366976256\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384493,\n \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384493\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.029343114798094476,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.029343114798094476\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n 
\"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.27330779054916987,\n \"acc_stderr\": 0.01593668106262856,\n \"acc_norm\": 0.27330779054916987,\n \"acc_norm_stderr\": 0.01593668106262856\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2543352601156069,\n \"acc_stderr\": 0.023445826276545546,\n \"acc_norm\": 0.2543352601156069,\n \"acc_norm_stderr\": 0.023445826276545546\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24022346368715083,\n \"acc_stderr\": 0.01428834380392531,\n \"acc_norm\": 0.24022346368715083,\n \"acc_norm_stderr\": 0.01428834380392531\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.023805186524888146,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.023805186524888146\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.27009646302250806,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25617283950617287,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.25617283950617287,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.21631205673758866,\n \"acc_stderr\": 0.024561720560562803,\n \"acc_norm\": 0.21631205673758866,\n \"acc_norm_stderr\": 0.024561720560562803\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2405475880052151,\n \"acc_stderr\": 0.010916406735478949,\n \"acc_norm\": 0.2405475880052151,\n \"acc_norm_stderr\": 0.010916406735478949\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.025767252010855966,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.025767252010855966\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.01784808957491323,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.01784808957491323\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2636363636363636,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.2636363636363636,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.16326530612244897,\n \"acc_stderr\": 0.02366169917709862,\n \"acc_norm\": 0.16326530612244897,\n \"acc_norm_stderr\": 0.02366169917709862\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409224,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409224\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3433734939759036,\n \"acc_stderr\": 0.03696584317010601,\n \"acc_norm\": 0.3433734939759036,\n \"acc_norm_stderr\": 0.03696584317010601\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.28654970760233917,\n \"acc_stderr\": 0.034678266857038266,\n \"acc_norm\": 0.28654970760233917,\n \"acc_norm_stderr\": 0.034678266857038266\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.3825474897236823,\n \"mc2_stderr\": 0.013853773787804245\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6077348066298343,\n \"acc_stderr\": 
0.013722400462000885\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.013646702047005308,\n \"acc_stderr\": 0.003195747075480784\n }\n}\n```", "repo_url": "https://huggingface.co/alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|arc:challenge|25_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|gsm8k|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hellaswag|10_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-29-11.286981.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-29-11.286981.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-29-11.286981.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T23-29-11.286981.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T23-29-11.286981.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["**/details_harness|winogrande|5_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-22T23-29-11.286981.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_22T23_29_11.286981", "path": ["results_2024-01-22T23-29-11.286981.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T23-29-11.286981.parquet"]}]}]}
2024-01-22T23:31:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA Dataset automatically created during the evaluation run of model alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T23:29:11.286981 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA\n\n\n\nDataset automatically created during the evaluation run of model alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T23:29:11.286981 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA\n\n\n\nDataset automatically created during the evaluation run of model alexredna/Tukan-1.1B-Chat-reasoning-sft-COLA on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T23:29:11.286981 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
f22f1e1d06cf469efd822b93ac6939892ee88883
# Dataset Card for sentimientos

This dataset has been created with [Argilla](https://docs.argilla.io). As shown in the sections below, this dataset can be loaded into Argilla as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets).

## Dataset Description

- **Homepage:** https://argilla.io
- **Repository:** https://github.com/argilla-io/argilla
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**

### Dataset Summary

This dataset contains:

* A dataset configuration file conforming to the Argilla dataset format named `argilla.yaml`. This configuration file will be used to configure the dataset when using the `FeedbackDataset.from_huggingface` method in Argilla.
* Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `FeedbackDataset.from_huggingface` and can be loaded independently using the `datasets` library via `load_dataset`.
* The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla.

### Load with Argilla

To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code:

```python
import argilla as rg

ds = rg.FeedbackDataset.from_huggingface("linceancestral/sentimientos")
```

### Load with `datasets`

To load this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code:

```python
from datasets import load_dataset

ds = load_dataset("linceancestral/sentimientos")
```

### Supported Tasks and Leaderboards

This dataset can contain [multiple fields, questions and responses](https://docs.argilla.io/en/latest/conceptual_guides/data_model.html#feedback-dataset) so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the [Dataset Structure section](#dataset-structure).

There are no leaderboards associated with this dataset.

### Languages

[More Information Needed]

## Dataset Structure

### Data in Argilla

The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, **metadata**, **vectors**, and **guidelines**.

The **fields** are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.

| Field Name | Title | Type | Required | Markdown |
| ---------- | ----- | ---- | -------- | -------- |
| text | Text | text | True | False |

The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label_selection, multi_label_selection, or ranking.

| Question Name | Title | Type | Required | Description | Values/Labels |
| ------------- | ----- | ---- | -------- | ----------- | ------------- |
| sentiment | Sentiment | label_selection | True | N/A | ['positive', 'neutral', 'negative'] |
| mixed-emotion | Mixed-emotion | multi_label_selection | True | N/A | ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'] |

The **suggestions** are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named by appending "-suggestion" and "-suggestion-metadata" to those, containing the value/s of the suggestion and its metadata, respectively. Likewise, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata".

The **metadata** is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`.

| Metadata Name | Title | Type | Values | Visible for Annotators |
| ------------- | ----- | ---- | ------ | ---------------------- |

The **guidelines** are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the [annotation guidelines](#annotation-guidelines) section.
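The same structure can also be declared programmatically. The snippet below is only an illustrative sketch, not the shipped configuration (which lives in `argilla.yaml`): it assumes the Argilla 1.13+ `FeedbackDataset` API and simply mirrors the field and question tables above.

```python
import argilla as rg

# Illustrative sketch only: the authoritative configuration is `argilla.yaml`.
# Field/question names, titles and labels below mirror the tables above.
dataset = rg.FeedbackDataset(
    fields=[
        rg.TextField(name="text", title="Text", use_markdown=False),
    ],
    questions=[
        rg.LabelQuestion(
            name="sentiment",
            title="Sentiment",
            labels=["positive", "neutral", "negative"],
            required=True,
        ),
        rg.MultiLabelQuestion(
            name="mixed-emotion",
            title="Mixed-emotion",
            labels=["joy", "anger", "sadness", "fear", "surprise", "love"],
            required=True,
        ),
    ],
    # Guidelines are a plain string; the actual guidelines are in the
    # "annotation guidelines" section of this card.
    guidelines="Emotion is a dataset of English Twitter messages with six basic emotions.",
)
```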
### Data Instances

An example of a dataset instance in Argilla looks as follows:

```json
{
    "external_id": null,
    "fields": {
        "text": "i didnt feel humiliated"
    },
    "metadata": {},
    "responses": [],
    "suggestions": [],
    "vectors": {}
}
```

While the same record in HuggingFace `datasets` looks as follows:

```json
{
    "external_id": null,
    "metadata": "{}",
    "mixed-emotion": [],
    "mixed-emotion-suggestion": null,
    "mixed-emotion-suggestion-metadata": {
        "agent": null,
        "score": null,
        "type": null
    },
    "sentiment": [],
    "sentiment-suggestion": null,
    "sentiment-suggestion-metadata": {
        "agent": null,
        "score": null,
        "type": null
    },
    "text": "i didnt feel humiliated"
}
```
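As a quick usage sketch of the flattened `datasets` format shown above (assuming the single `train` split described in the Data Splits section), the columns can be inspected directly; the column names below are taken from the example record:

```python
from datasets import load_dataset

# Minimal sketch: inspect the flattened HuggingFace records shown above.
# Column names ("text", "sentiment", "mixed-emotion", ...) mirror the example record.
ds = load_dataset("linceancestral/sentimientos", split="train")

for record in ds.select(range(min(3, len(ds)))):
    print(record["text"])
    print("  sentiment responses:", record["sentiment"])              # empty list if unannotated
    print("  sentiment suggestion:", record["sentiment-suggestion"])  # None if no suggestion was stored
    print("  mixed-emotion responses:", record["mixed-emotion"])
```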
### Data Fields

Among the dataset fields, we differentiate between the following:

* **Fields:** These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.
    * **text** is of type `text`.
* **Questions:** These are the questions that will be asked to the annotators. They can be of different types, such as `RatingQuestion`, `TextQuestion`, `LabelQuestion`, `MultiLabelQuestion`, and `RankingQuestion`.
    * **sentiment** is of type `label_selection` with the following allowed values ['positive', 'neutral', 'negative'].
    * **mixed-emotion** is of type `multi_label_selection` with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].
* **Suggestions:** As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.
    * (optional) **sentiment-suggestion** is of type `label_selection` with the following allowed values ['positive', 'neutral', 'negative'].
    * (optional) **mixed-emotion-suggestion** is of type `multi_label_selection` with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].

Additionally, we also have two more fields that are optional and are the following:

* **metadata:** This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`.
* **external_id:** This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.

### Data Splits

The dataset contains a single split, which is `train`.

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation guidelines

Emotion is a dataset of English Twitter messages with six basic emotions: anger, fear, joy, love, sadness, and surprise.

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
linceancestral/sentimientos
[ "size_categories:n<1K", "rlfh", "argilla", "human-feedback", "region:us" ]
2024-01-23T00:06:00+00:00
{"size_categories": "n<1K", "tags": ["rlfh", "argilla", "human-feedback"]}
2024-01-23T20:40:37+00:00
[]
[]
TAGS #size_categories-n<1K #rlfh #argilla #human-feedback #region-us
Dataset Card for sentimientos ============================= This dataset has been created with Argilla. As shown in the sections below, this dataset can be loaded into Argilla as explained in Load with Argilla, or used directly with the 'datasets' library in Load with 'datasets'. Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: * Leaderboard: * Point of Contact: ### Dataset Summary This dataset contains: * A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\_huggingface' method in Argilla. * Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\_huggingface' and can be loaded independently using the 'datasets' library via 'load\_dataset'. * The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla. ### Load with Argilla To load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code: ### Load with 'datasets' To load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code: ### Supported Tasks and Leaderboards This dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section. There are no leaderboards associated with this dataset. ### Languages Dataset Structure ----------------- ### Data in Argilla The dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines. The fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. The questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\_selection, multi\_label\_selection, or ranking. The suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named by appending "-suggestion" and "-suggestion-metadata" to those, containing the value/s of the suggestion and its metadata, respectively. Likewise, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata". The metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. The guidelines are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the annotation guidelines section. 
### Data Instances An example of a dataset instance in Argilla looks as follows: While the same record in HuggingFace 'datasets' looks as follows: ### Data Fields Among the dataset fields, we differentiate between the following: * Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. + text is of type 'text'. * Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'. + sentiment is of type 'label\_selection' with the following allowed values ['positive', 'neutral', 'negative']. + mixed-emotion is of type 'multi\_label\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love']. * Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable. + (optional) sentiment-suggestion is of type 'label\_selection' with the following allowed values ['positive', 'neutral', 'negative']. + (optional) mixed-emotion-suggestion is of type 'multi\_label\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love']. Additionally, we also have two more fields that are optional and are the following: * metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. * external\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file. ### Data Splits The dataset contains a single split, which is 'train'. Dataset Creation ---------------- ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation guidelines Emotion is a dataset of English Twitter messages with six basic emotions: anger, fear, joy, love, sadness, and surprise. #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information Considerations for Using the Data --------------------------------- ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations Additional Information ---------------------- ### Dataset Curators ### Licensing Information ### Contributions
[ "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. 
These are the ones that will be used to provide responses to the questions.\n\n\n\t+ text is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ sentiment is of type 'label\\_selection' with the following allowed values ['positive', 'neutral', 'negative'].\n\t+ mixed-emotion is of type 'multi\\_label\\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) sentiment-suggestion is of type 'label\\_selection' with the following allowed values ['positive', 'neutral', 'negative'].\n\t+ (optional) mixed-emotion-suggestion is of type 'multi\\_label\\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation guidelines\n\n\nEmotion is a dataset of English Twitter messages with six basic emotions: anger, fear, joy, love, sadness, and surprise.", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#size_categories-n<1K #rlfh #argilla #human-feedback #region-us \n", "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. 
Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\t+ text is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ sentiment is of type 'label\\_selection' with the following allowed values ['positive', 'neutral', 'negative'].\n\t+ mixed-emotion is of type 'multi\\_label\\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) sentiment-suggestion is of type 'label\\_selection' with the following allowed values ['positive', 'neutral', 'negative'].\n\t+ (optional) mixed-emotion-suggestion is of type 'multi\\_label\\_selection' with the following allowed values ['joy', 'anger', 'sadness', 'fear', 'surprise', 'love'].\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation guidelines\n\n\nEmotion is a dataset of English Twitter messages with six basic emotions: anger, fear, joy, love, sadness, and surprise.", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
6cff01b8c885dd865bcb7cb5b1d97611c6939d3a
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
podrivo/test
[ "region:us" ]
2024-01-23T01:22:08+00:00
{}
2024-01-23T01:27:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
f341da146f36852d11a6a4e9a051996468e15ed3
# Dataset Card for Evaluation run of CultriX/MergeTrix-7B-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [CultriX/MergeTrix-7B-v2](https://huggingface.co/CultriX/MergeTrix-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T01:33:45.756570](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2/blob/main/results_2024-01-23T01-33-45.756570.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6550880097292562, "acc_stderr": 0.03206914305226512, "acc_norm": 0.6541828399839258, "acc_norm_stderr": 0.03275033920579298, "mc1": 0.5312117503059975, "mc1_stderr": 0.017469364874577516, "mc2": 0.6716543422722782, "mc2_stderr": 0.015319258452217491 }, "harness|arc:challenge|25": { "acc": 0.6988054607508533, "acc_stderr": 0.013406741767847629, "acc_norm": 0.726962457337884, "acc_norm_stderr": 0.013019332762635753 }, "harness|hellaswag|10": { "acc": 0.7176857199761004, "acc_stderr": 0.0044920552794071094, "acc_norm": 0.8847839075881299, "acc_norm_stderr": 0.0031863002304505727 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.674074074074074, "acc_stderr": 0.040491220417025055, "acc_norm": 0.674074074074074, "acc_norm_stderr": 0.040491220417025055 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7283018867924528, "acc_stderr": 0.027377706624670713, "acc_norm": 0.7283018867924528, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.048580835742663454, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.048580835742663454 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.025591857761382182, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.025591857761382182 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.02302589961718872, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.02302589961718872 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9119170984455959, "acc_stderr": 0.02045374660160103, "acc_norm": 0.9119170984455959, "acc_norm_stderr": 0.02045374660160103 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37777777777777777, "acc_stderr": 0.029560707392465725, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.029560707392465725 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.030283995525884396, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.030283995525884396 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3841059602649007, "acc_stderr": 0.03971301814719197, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719197 
}, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.01555580271359017, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.01555580271359017 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553346, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553346 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621115, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621115 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477518, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477518 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159463, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098823, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098823 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993464, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993464 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.02344582627654554, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.02344582627654554 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42905027932960893, "acc_stderr": 0.016553287863116033, "acc_norm": 0.42905027932960893, "acc_norm_stderr": 0.016553287863116033 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.025360603796242557, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.025360603796242557 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, 
"acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.470013037809648, "acc_stderr": 0.01274724896707906, "acc_norm": 0.470013037809648, "acc_norm_stderr": 0.01274724896707906 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6580882352941176, "acc_stderr": 0.028814722422254187, "acc_norm": 0.6580882352941176, "acc_norm_stderr": 0.028814722422254187 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6650326797385621, "acc_stderr": 0.019094228167000325, "acc_norm": 0.6650326797385621, "acc_norm_stderr": 0.019094228167000325 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128448, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128448 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233278, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233278 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5312117503059975, "mc1_stderr": 0.017469364874577516, "mc2": 0.6716543422722782, "mc2_stderr": 0.015319258452217491 }, "harness|winogrande|5": { "acc": 0.8674033149171271, "acc_stderr": 0.009531472942402034 }, "harness|gsm8k|5": { "acc": 0.6914329037149356, "acc_stderr": 0.012723076049815906 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
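Beyond the per-example details loaded with the snippet above, the aggregated numbers quoted under "Latest results" live in a JSON file at the root of this dataset repository. The following is a minimal sketch, assuming the file sits at the repository root under the filename shown in the "Latest results" link; the macro-average below is only an illustration, not the official leaderboard aggregation.

```python
import json
from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in the "Latest results" link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2",
    filename="results_2024-01-23T01-33-45.756570.json",
    repo_type="dataset",
)

with open(path) as f:
    data = json.load(f)

# The card shows the per-task scores at the top level; some result files nest
# them under a "results" key, so handle both layouts.
scores = data.get("results", data)

mmlu_tasks = [name for name in scores if name.startswith("harness|hendrycksTest-")]
mmlu_avg = sum(scores[t]["acc"] for t in mmlu_tasks) / len(mmlu_tasks)
print(f"Macro-average acc over {len(mmlu_tasks)} hendrycksTest subtasks: {mmlu_avg:.4f}")
```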
open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2
[ "region:us" ]
2024-01-23T01:36:03+00:00
{"pretty_name": "Evaluation run of CultriX/MergeTrix-7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [CultriX/MergeTrix-7B-v2](https://huggingface.co/CultriX/MergeTrix-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T01:33:45.756570](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MergeTrix-7B-v2/blob/main/results_2024-01-23T01-33-45.756570.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6550880097292562,\n \"acc_stderr\": 0.03206914305226512,\n \"acc_norm\": 0.6541828399839258,\n \"acc_norm_stderr\": 0.03275033920579298,\n \"mc1\": 0.5312117503059975,\n \"mc1_stderr\": 0.017469364874577516,\n \"mc2\": 0.6716543422722782,\n \"mc2_stderr\": 0.015319258452217491\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6988054607508533,\n \"acc_stderr\": 0.013406741767847629,\n \"acc_norm\": 0.726962457337884,\n \"acc_norm_stderr\": 0.013019332762635753\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7176857199761004,\n \"acc_stderr\": 0.0044920552794071094,\n \"acc_norm\": 0.8847839075881299,\n \"acc_norm_stderr\": 0.0031863002304505727\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.674074074074074,\n \"acc_stderr\": 0.040491220417025055,\n \"acc_norm\": 0.674074074074074,\n \"acc_norm_stderr\": 0.040491220417025055\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7283018867924528,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.7283018867924528,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 
0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.025591857761382182,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.025591857761382182\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.02302589961718872,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.02302589961718872\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9119170984455959,\n \"acc_stderr\": 0.02045374660160103,\n \"acc_norm\": 0.9119170984455959,\n \"acc_norm_stderr\": 0.02045374660160103\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n 
\"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37777777777777777,\n \"acc_stderr\": 0.029560707392465725,\n \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.029560707392465725\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.01555580271359017,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.01555580271359017\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553346,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553346\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621115,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621115\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477518,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477518\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159463,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159463\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098823,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098823\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993464,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 
0.013625556907993464\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.02344582627654554,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.02344582627654554\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116033,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116033\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.470013037809648,\n \"acc_stderr\": 0.01274724896707906,\n \"acc_norm\": 0.470013037809648,\n \"acc_norm_stderr\": 0.01274724896707906\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6580882352941176,\n \"acc_stderr\": 0.028814722422254187,\n \"acc_norm\": 0.6580882352941176,\n \"acc_norm_stderr\": 0.028814722422254187\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000325,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000325\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128448,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128448\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233278,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233278\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5312117503059975,\n \"mc1_stderr\": 0.017469364874577516,\n \"mc2\": 0.6716543422722782,\n \"mc2_stderr\": 0.015319258452217491\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8674033149171271,\n \"acc_stderr\": 0.009531472942402034\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6914329037149356,\n \"acc_stderr\": 0.012723076049815906\n }\n}\n```", "repo_url": "https://huggingface.co/CultriX/MergeTrix-7B-v2", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|arc:challenge|25_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|gsm8k|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hellaswag|10_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T01-33-45.756570.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T01-33-45.756570.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T01-33-45.756570.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T01-33-45.756570.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T01-33-45.756570.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T01-33-45.756570.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["**/details_harness|winogrande|5_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T01-33-45.756570.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T01_33_45.756570", "path": ["results_2024-01-23T01-33-45.756570.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T01-33-45.756570.parquet"]}]}]}
2024-01-23T01:36:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CultriX/MergeTrix-7B-v2 Dataset automatically created during the evaluation run of model CultriX/MergeTrix-7B-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T01:33:45.756570 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of CultriX/MergeTrix-7B-v2\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MergeTrix-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T01:33:45.756570(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CultriX/MergeTrix-7B-v2\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MergeTrix-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T01:33:45.756570(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1fff66cddf97ed5b458037bced06ee6893bed44a
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. 
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
SEA-AI/crowdsourced-sea-images
[ "region:us" ]
2024-01-23T02:09:37+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data.csv"}]}]}
2024-02-12T14:32:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
66820c65b3798c95f5f68bad0355d2030d5ab923
# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3](https://huggingface.co/adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T02:09:10.146041](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3/blob/main/results_2024-01-23T02-09-10.146041.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7545279834638094, "acc_stderr": 0.02841736788036784, "acc_norm": 0.7596153374287833, "acc_norm_stderr": 0.028950310267755346, "mc1": 0.3047735618115055, "mc1_stderr": 0.01611412415688246, "mc2": 0.45350711707317476, "mc2_stderr": 0.014345602059169307 }, "harness|arc:challenge|25": { "acc": 0.6143344709897611, "acc_stderr": 0.014224250973257184, "acc_norm": 0.6484641638225256, "acc_norm_stderr": 0.013952413699600933 }, "harness|hellaswag|10": { "acc": 0.6432981477793268, "acc_stderr": 0.004780467270911773, "acc_norm": 0.8477394941246763, "acc_norm_stderr": 0.0035853896364723826 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7037037037037037, "acc_stderr": 0.03944624162501116, "acc_norm": 0.7037037037037037, "acc_norm_stderr": 0.03944624162501116 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.868421052631579, "acc_stderr": 0.027508689533549915, "acc_norm": 0.868421052631579, "acc_norm_stderr": 0.027508689533549915 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8150943396226416, "acc_stderr": 0.023893351834464317, "acc_norm": 0.8150943396226416, "acc_norm_stderr": 0.023893351834464317 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.875, "acc_stderr": 0.02765610492929436, "acc_norm": 0.875, "acc_norm_stderr": 0.02765610492929436 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": 
{ "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7398843930635838, "acc_stderr": 0.033450369167889904, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.033450369167889904 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5, "acc_stderr": 0.04975185951049946, "acc_norm": 0.5, "acc_norm_stderr": 0.04975185951049946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7829787234042553, "acc_stderr": 0.026947483121496228, "acc_norm": 0.7829787234042553, "acc_norm_stderr": 0.026947483121496228 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5789473684210527, "acc_stderr": 0.046446020912223177, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7724137931034483, "acc_stderr": 0.03493950380131184, "acc_norm": 0.7724137931034483, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6428571428571429, "acc_stderr": 0.024677862841332783, "acc_norm": 0.6428571428571429, "acc_norm_stderr": 0.024677862841332783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5873015873015873, "acc_stderr": 0.04403438954768176, "acc_norm": 0.5873015873015873, "acc_norm_stderr": 0.04403438954768176 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.896774193548387, "acc_stderr": 0.017308381281034534, "acc_norm": 0.896774193548387, "acc_norm_stderr": 0.017308381281034534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6748768472906403, "acc_stderr": 0.032957975663112704, "acc_norm": 0.6748768472906403, "acc_norm_stderr": 0.032957975663112704 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8545454545454545, "acc_stderr": 0.027530196355066584, "acc_norm": 0.8545454545454545, "acc_norm_stderr": 0.027530196355066584 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9141414141414141, "acc_stderr": 0.01996022556317289, "acc_norm": 0.9141414141414141, "acc_norm_stderr": 0.01996022556317289 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.01146452335695318, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.01146452335695318 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8051282051282052, "acc_stderr": 0.020083167595181393, "acc_norm": 0.8051282051282052, "acc_norm_stderr": 0.020083167595181393 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.02995824925008211, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.02995824925008211 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8361344537815126, "acc_stderr": 0.024044054940440488, "acc_norm": 0.8361344537815126, "acc_norm_stderr": 0.024044054940440488 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.48344370860927155, "acc_stderr": 0.0408024418562897, "acc_norm": 0.48344370860927155, "acc_norm_stderr": 0.0408024418562897 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9174311926605505, "acc_stderr": 0.011800361363016562, "acc_norm": 0.9174311926605505, "acc_norm_stderr": 0.011800361363016562 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6620370370370371, "acc_stderr": 0.03225941352631295, "acc_norm": 0.6620370370370371, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9117647058823529, "acc_stderr": 0.019907399791316956, "acc_norm": 0.9117647058823529, "acc_norm_stderr": 0.019907399791316956 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9113924050632911, "acc_stderr": 0.018498315206865387, "acc_norm": 0.9113924050632911, "acc_norm_stderr": 0.018498315206865387 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9008264462809917, "acc_stderr": 0.027285246312758957, "acc_norm": 0.9008264462809917, "acc_norm_stderr": 0.027285246312758957 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8796296296296297, "acc_stderr": 0.031457038543062504, "acc_norm": 0.8796296296296297, "acc_norm_stderr": 0.031457038543062504 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8895705521472392, "acc_stderr": 0.024624937788941318, "acc_norm": 0.8895705521472392, "acc_norm_stderr": 0.024624937788941318 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5446428571428571, "acc_stderr": 0.04726835553719098, "acc_norm": 0.5446428571428571, "acc_norm_stderr": 0.04726835553719098 }, "harness|hendrycksTest-management|5": { "acc": 0.8737864077669902, "acc_stderr": 0.03288180278808628, "acc_norm": 0.8737864077669902, "acc_norm_stderr": 0.03288180278808628 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9358974358974359, "acc_stderr": 0.016046261631673137, "acc_norm": 0.9358974358974359, "acc_norm_stderr": 0.016046261631673137 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8991060025542784, "acc_stderr": 0.010770472014886711, "acc_norm": 0.8991060025542784, "acc_norm_stderr": 0.010770472014886711 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8121387283236994, "acc_stderr": 0.021029269752423217, "acc_norm": 0.8121387283236994, "acc_norm_stderr": 0.021029269752423217 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6603351955307263, "acc_stderr": 0.015839400406212498, "acc_norm": 0.6603351955307263, "acc_norm_stderr": 0.015839400406212498 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.869281045751634, "acc_stderr": 0.01930187362421527, "acc_norm": 0.869281045751634, "acc_norm_stderr": 0.01930187362421527 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8392282958199357, "acc_stderr": 0.020862388082391888, "acc_norm": 0.8392282958199357, "acc_norm_stderr": 0.020862388082391888 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8703703703703703, "acc_stderr": 0.01868972572106207, 
"acc_norm": 0.8703703703703703, "acc_norm_stderr": 0.01868972572106207 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6099290780141844, "acc_stderr": 0.02909767559946393, "acc_norm": 0.6099290780141844, "acc_norm_stderr": 0.02909767559946393 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5951760104302477, "acc_stderr": 0.012536743830953977, "acc_norm": 0.5951760104302477, "acc_norm_stderr": 0.012536743830953977 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8088235294117647, "acc_stderr": 0.02388688192244033, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.02388688192244033 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8251633986928104, "acc_stderr": 0.01536616706478066, "acc_norm": 0.8251633986928104, "acc_norm_stderr": 0.01536616706478066 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8163265306122449, "acc_stderr": 0.024789071332007653, "acc_norm": 0.8163265306122449, "acc_norm_stderr": 0.024789071332007653 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9154228855721394, "acc_stderr": 0.019675343217199173, "acc_norm": 0.9154228855721394, "acc_norm_stderr": 0.019675343217199173 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.572289156626506, "acc_stderr": 0.038515976837185335, "acc_norm": 0.572289156626506, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.3047735618115055, "mc1_stderr": 0.01611412415688246, "mc2": 0.45350711707317476, "mc2_stderr": 0.014345602059169307 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.010529981411838916 }, "harness|gsm8k|5": { "acc": 0.6163760424564063, "acc_stderr": 0.013394238584938163 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
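The card above shows how to load one per-task configuration ("harness_winogrande_5"). As a complementary, non-authoritative sketch, the aggregated "results" configuration it mentions can be read the same way; the config and split names here are assumed to follow the pattern shown for the other evaluation runs in this dump.

```python
from datasets import load_dataset

# Aggregated metrics for this run are stored in the "results" config; the
# "latest" split is assumed to point at the most recent results parquet.
results = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3",
    "results",
    split="latest",
)
print(results)
```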
open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3
[ "region:us" ]
2024-01-23T02:11:23+00:00
{"pretty_name": "Evaluation run of adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3](https://huggingface.co/adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T02:09:10.146041](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3/blob/main/results_2024-01-23T02-09-10.146041.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7545279834638094,\n \"acc_stderr\": 0.02841736788036784,\n \"acc_norm\": 0.7596153374287833,\n \"acc_norm_stderr\": 0.028950310267755346,\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.01611412415688246,\n \"mc2\": 0.45350711707317476,\n \"mc2_stderr\": 0.014345602059169307\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6143344709897611,\n \"acc_stderr\": 0.014224250973257184,\n \"acc_norm\": 0.6484641638225256,\n \"acc_norm_stderr\": 0.013952413699600933\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6432981477793268,\n \"acc_stderr\": 0.004780467270911773,\n \"acc_norm\": 0.8477394941246763,\n \"acc_norm_stderr\": 0.0035853896364723826\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.03944624162501116,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.03944624162501116\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.868421052631579,\n \"acc_stderr\": 0.027508689533549915,\n \"acc_norm\": 0.868421052631579,\n \"acc_norm_stderr\": 0.027508689533549915\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8150943396226416,\n \"acc_stderr\": 0.023893351834464317,\n \"acc_norm\": 0.8150943396226416,\n \"acc_norm_stderr\": 0.023893351834464317\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.02765610492929436,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 
0.02765610492929436\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.033450369167889904,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.033450369167889904\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04975185951049946,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04975185951049946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7829787234042553,\n \"acc_stderr\": 0.026947483121496228,\n \"acc_norm\": 0.7829787234042553,\n \"acc_norm_stderr\": 0.026947483121496228\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7724137931034483,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.7724137931034483,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6428571428571429,\n \"acc_stderr\": 0.024677862841332783,\n \"acc_norm\": 0.6428571428571429,\n \"acc_norm_stderr\": 0.024677862841332783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5873015873015873,\n \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.5873015873015873,\n \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.896774193548387,\n \"acc_stderr\": 0.017308381281034534,\n \"acc_norm\": 0.896774193548387,\n \"acc_norm_stderr\": 0.017308381281034534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6748768472906403,\n \"acc_stderr\": 0.032957975663112704,\n \"acc_norm\": 0.6748768472906403,\n \"acc_norm_stderr\": 0.032957975663112704\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8545454545454545,\n \"acc_stderr\": 0.027530196355066584,\n \"acc_norm\": 0.8545454545454545,\n \"acc_norm_stderr\": 0.027530196355066584\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9141414141414141,\n \"acc_stderr\": 0.01996022556317289,\n \"acc_norm\": 0.9141414141414141,\n \"acc_norm_stderr\": 0.01996022556317289\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.01146452335695318,\n \"acc_norm\": 0.9740932642487047,\n 
\"acc_norm_stderr\": 0.01146452335695318\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8051282051282052,\n \"acc_stderr\": 0.020083167595181393,\n \"acc_norm\": 0.8051282051282052,\n \"acc_norm_stderr\": 0.020083167595181393\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.02995824925008211,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.02995824925008211\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8361344537815126,\n \"acc_stderr\": 0.024044054940440488,\n \"acc_norm\": 0.8361344537815126,\n \"acc_norm_stderr\": 0.024044054940440488\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.48344370860927155,\n \"acc_stderr\": 0.0408024418562897,\n \"acc_norm\": 0.48344370860927155,\n \"acc_norm_stderr\": 0.0408024418562897\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9174311926605505,\n \"acc_stderr\": 0.011800361363016562,\n \"acc_norm\": 0.9174311926605505,\n \"acc_norm_stderr\": 0.011800361363016562\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6620370370370371,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.6620370370370371,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9117647058823529,\n \"acc_stderr\": 0.019907399791316956,\n \"acc_norm\": 0.9117647058823529,\n \"acc_norm_stderr\": 0.019907399791316956\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9113924050632911,\n \"acc_stderr\": 0.018498315206865387,\n \"acc_norm\": 0.9113924050632911,\n \"acc_norm_stderr\": 0.018498315206865387\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8702290076335878,\n \"acc_stderr\": 0.029473649496907065,\n \"acc_norm\": 0.8702290076335878,\n \"acc_norm_stderr\": 0.029473649496907065\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.9008264462809917,\n \"acc_stderr\": 0.027285246312758957,\n \"acc_norm\": 0.9008264462809917,\n \"acc_norm_stderr\": 0.027285246312758957\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8796296296296297,\n \"acc_stderr\": 0.031457038543062504,\n \"acc_norm\": 0.8796296296296297,\n \"acc_norm_stderr\": 0.031457038543062504\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8895705521472392,\n \"acc_stderr\": 0.024624937788941318,\n \"acc_norm\": 0.8895705521472392,\n \"acc_norm_stderr\": 0.024624937788941318\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5446428571428571,\n \"acc_stderr\": 0.04726835553719098,\n \"acc_norm\": 0.5446428571428571,\n \"acc_norm_stderr\": 0.04726835553719098\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8737864077669902,\n \"acc_stderr\": 0.03288180278808628,\n \"acc_norm\": 0.8737864077669902,\n \"acc_norm_stderr\": 0.03288180278808628\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9358974358974359,\n \"acc_stderr\": 0.016046261631673137,\n \"acc_norm\": 0.9358974358974359,\n \"acc_norm_stderr\": 0.016046261631673137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8991060025542784,\n \"acc_stderr\": 0.010770472014886711,\n \"acc_norm\": 0.8991060025542784,\n \"acc_norm_stderr\": 0.010770472014886711\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8121387283236994,\n \"acc_stderr\": 0.021029269752423217,\n \"acc_norm\": 0.8121387283236994,\n \"acc_norm_stderr\": 0.021029269752423217\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6603351955307263,\n \"acc_stderr\": 0.015839400406212498,\n \"acc_norm\": 0.6603351955307263,\n \"acc_norm_stderr\": 0.015839400406212498\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.869281045751634,\n \"acc_stderr\": 0.01930187362421527,\n \"acc_norm\": 0.869281045751634,\n \"acc_norm_stderr\": 0.01930187362421527\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8392282958199357,\n \"acc_stderr\": 0.020862388082391888,\n \"acc_norm\": 0.8392282958199357,\n \"acc_norm_stderr\": 0.020862388082391888\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8703703703703703,\n \"acc_stderr\": 0.01868972572106207,\n \"acc_norm\": 0.8703703703703703,\n \"acc_norm_stderr\": 0.01868972572106207\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6099290780141844,\n \"acc_stderr\": 0.02909767559946393,\n \"acc_norm\": 0.6099290780141844,\n \"acc_norm_stderr\": 0.02909767559946393\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5951760104302477,\n \"acc_stderr\": 0.012536743830953977,\n \"acc_norm\": 0.5951760104302477,\n \"acc_norm_stderr\": 0.012536743830953977\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.02388688192244033,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.02388688192244033\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8251633986928104,\n \"acc_stderr\": 0.01536616706478066,\n \"acc_norm\": 0.8251633986928104,\n \"acc_norm_stderr\": 0.01536616706478066\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8163265306122449,\n \"acc_stderr\": 0.024789071332007653,\n \"acc_norm\": 0.8163265306122449,\n \"acc_norm_stderr\": 0.024789071332007653\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9154228855721394,\n \"acc_stderr\": 0.019675343217199173,\n \"acc_norm\": 0.9154228855721394,\n \"acc_norm_stderr\": 0.019675343217199173\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.572289156626506,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015577,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015577\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.01611412415688246,\n \"mc2\": 0.45350711707317476,\n \"mc2_stderr\": 0.014345602059169307\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.010529981411838916\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6163760424564063,\n \"acc_stderr\": 0.013394238584938163\n }\n}\n```", "repo_url": "https://huggingface.co/adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-09-10.146041.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-09-10.146041.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-09-10.146041.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-09-10.146041.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-09-10.146041.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["**/details_harness|winogrande|5_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T02-09-10.146041.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T02_09_10.146041", "path": ["results_2024-01-23T02-09-10.146041.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T02-09-10.146041.parquet"]}]}]}
2024-01-23T02:11:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3 Dataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T02:09:10.146041(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:09:10.146041(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-rawrr1-LORA-DPO-experimental-r3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:09:10.146041(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
c97d9f6381fae6c86fa686e3fc3aac9de36b0ba7
# Dataset Card for Evaluation run of LordNoah/Alpaca_spin_gpt2_e1_se0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [LordNoah/Alpaca_spin_gpt2_e1_se0](https://huggingface.co/LordNoah/Alpaca_spin_gpt2_e1_se0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T02:15:43.434636](https://huggingface.co/datasets/open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0/blob/main/results_2024-01-23T02-15-43.434636.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2686575062749826, "acc_stderr": 0.03128827530334957, "acc_norm": 0.27023396613942313, "acc_norm_stderr": 0.03212003408578007, "mc1": 0.2178702570379437, "mc1_stderr": 0.014450846714123892, "mc2": 0.3905583201208923, "mc2_stderr": 0.014224369312263067 }, "harness|arc:challenge|25": { "acc": 0.26023890784982934, "acc_stderr": 0.01282193022511256, "acc_norm": 0.27986348122866894, "acc_norm_stderr": 0.013119040897725922 }, "harness|hellaswag|10": { "acc": 0.36566421031667, "acc_stderr": 0.0048063163427093936, "acc_norm": 0.45737900816570404, "acc_norm_stderr": 0.004971619995879763 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3037037037037037, "acc_stderr": 0.039725528847851375, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.039725528847851375 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.038035102483515854, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.038035102483515854 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322674, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322674 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3320754716981132, "acc_stderr": 0.02898545565233439, "acc_norm": 0.3320754716981132, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2543352601156069, "acc_stderr": 0.0332055644308557, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.0332055644308557 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993177, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993177 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.030363582197238167, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.296551724137931, "acc_stderr": 0.038061426873099935, "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.038061426873099935 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.02293097307163335, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.02293097307163335 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.03395490020856113, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.03395490020856113 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.024685979286239956, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.024685979286239956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.33497536945812806, "acc_stderr": 0.033208527423483104, "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.033208527423483104 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3484848484848485, "acc_stderr": 0.03394853965156402, "acc_norm": 0.3484848484848485, "acc_norm_stderr": 0.03394853965156402 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.2538860103626943, "acc_stderr": 0.03141024780565319, "acc_norm": 0.2538860103626943, "acc_norm_stderr": 0.03141024780565319 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396987, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396987 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514566, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514566 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3155963302752294, "acc_stderr": 0.019926117513869666, "acc_norm": 0.3155963302752294, "acc_norm_stderr": 0.019926117513869666 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.029157522184605607, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605607 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2696078431372549, "acc_stderr": 0.03114557065948678, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.03114557065948678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842548, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842548 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.1210762331838565, "acc_stderr": 0.021894174113185737, "acc_norm": 0.1210762331838565, "acc_norm_stderr": 0.021894174113185737 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.03727673575596918, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.03727673575596918 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3305785123966942, "acc_stderr": 0.04294340845212094, "acc_norm": 0.3305785123966942, "acc_norm_stderr": 0.04294340845212094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243839, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|hendrycksTest-management|5": { "acc": 0.3592233009708738, "acc_stderr": 0.04750458399041692, "acc_norm": 0.3592233009708738, "acc_norm_stderr": 0.04750458399041692 }, "harness|hendrycksTest-marketing|5": { "acc": 0.24358974358974358, "acc_stderr": 0.028120966503914394, "acc_norm": 0.24358974358974358, "acc_norm_stderr": 0.028120966503914394 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.20434227330779056, "acc_stderr": 0.0144191239809319, "acc_norm": 0.20434227330779056, "acc_norm_stderr": 0.0144191239809319 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.29190751445086704, "acc_stderr": 0.024476994076247333, "acc_norm": 0.29190751445086704, "acc_norm_stderr": 0.024476994076247333 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.02463004897982478, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.28938906752411575, "acc_stderr": 0.025755865922632938, "acc_norm": 0.28938906752411575, "acc_norm_stderr": 0.025755865922632938 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2222222222222222, "acc_stderr": 0.023132376234543325, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 
0.023132376234543325 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.25177304964539005, "acc_stderr": 0.025892151156709405, "acc_norm": 0.25177304964539005, "acc_norm_stderr": 0.025892151156709405 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24771838331160365, "acc_stderr": 0.011025499291443737, "acc_norm": 0.24771838331160365, "acc_norm_stderr": 0.011025499291443737 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.21323529411764705, "acc_stderr": 0.024880971512294285, "acc_norm": 0.21323529411764705, "acc_norm_stderr": 0.024880971512294285 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2434640522875817, "acc_stderr": 0.017362473762146627, "acc_norm": 0.2434640522875817, "acc_norm_stderr": 0.017362473762146627 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.038950910157241364, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.038950910157241364 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.27755102040816326, "acc_stderr": 0.02866685779027465, "acc_norm": 0.27755102040816326, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.24096385542168675, "acc_stderr": 0.0332939411907353, "acc_norm": 0.24096385542168675, "acc_norm_stderr": 0.0332939411907353 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.29239766081871343, "acc_stderr": 0.034886477134579215, "acc_norm": 0.29239766081871343, "acc_norm_stderr": 0.034886477134579215 }, "harness|truthfulqa:mc|0": { "mc1": 0.2178702570379437, "mc1_stderr": 0.014450846714123892, "mc2": 0.3905583201208923, "mc2_stderr": 0.014224369312263067 }, "harness|winogrande|5": { "acc": 0.5556432517758485, "acc_stderr": 0.013965196769083553 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0
[ "region:us" ]
2024-01-23T02:17:01+00:00
{"pretty_name": "Evaluation run of LordNoah/Alpaca_spin_gpt2_e1_se0", "dataset_summary": "Dataset automatically created during the evaluation run of model [LordNoah/Alpaca_spin_gpt2_e1_se0](https://huggingface.co/LordNoah/Alpaca_spin_gpt2_e1_se0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T02:15:43.434636](https://huggingface.co/datasets/open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0/blob/main/results_2024-01-23T02-15-43.434636.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2686575062749826,\n \"acc_stderr\": 0.03128827530334957,\n \"acc_norm\": 0.27023396613942313,\n \"acc_norm_stderr\": 0.03212003408578007,\n \"mc1\": 0.2178702570379437,\n \"mc1_stderr\": 0.014450846714123892,\n \"mc2\": 0.3905583201208923,\n \"mc2_stderr\": 0.014224369312263067\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.26023890784982934,\n \"acc_stderr\": 0.01282193022511256,\n \"acc_norm\": 0.27986348122866894,\n \"acc_norm_stderr\": 0.013119040897725922\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.36566421031667,\n \"acc_stderr\": 0.0048063163427093936,\n \"acc_norm\": 0.45737900816570404,\n \"acc_norm_stderr\": 0.004971619995879763\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.039725528847851375,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.039725528847851375\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3223684210526316,\n \"acc_stderr\": 0.038035102483515854,\n \"acc_norm\": 0.3223684210526316,\n \"acc_norm_stderr\": 0.038035102483515854\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322674,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322674\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3320754716981132,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.3320754716981132,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2543352601156069,\n \"acc_stderr\": 0.0332055644308557,\n \"acc_norm\": 0.2543352601156069,\n \"acc_norm_stderr\": 0.0332055644308557\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.04336432707993177,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.04336432707993177\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.030363582197238167,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.030363582197238167\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.296551724137931,\n \"acc_stderr\": 0.038061426873099935,\n \"acc_norm\": 0.296551724137931,\n \"acc_norm_stderr\": 0.038061426873099935\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2724867724867725,\n \"acc_stderr\": 0.02293097307163335,\n \"acc_norm\": 0.2724867724867725,\n \"acc_norm_stderr\": 0.02293097307163335\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 0.03395490020856113,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.03395490020856113\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n \"acc_stderr\": 0.024685979286239956,\n \"acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.024685979286239956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.33497536945812806,\n \"acc_stderr\": 0.033208527423483104,\n \"acc_norm\": 0.33497536945812806,\n \"acc_norm_stderr\": 0.033208527423483104\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2787878787878788,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.2787878787878788,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3484848484848485,\n \"acc_stderr\": 0.03394853965156402,\n \"acc_norm\": 0.3484848484848485,\n \"acc_norm_stderr\": 0.03394853965156402\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.2538860103626943,\n \"acc_stderr\": 0.03141024780565319,\n \"acc_norm\": 0.2538860103626943,\n \"acc_norm_stderr\": 0.03141024780565319\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.36153846153846153,\n \"acc_stderr\": 0.024359581465396987,\n \"acc_norm\": 0.36153846153846153,\n \"acc_norm_stderr\": 0.024359581465396987\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514566,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514566\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.03631329803969653,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969653\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3155963302752294,\n \"acc_stderr\": 0.019926117513869666,\n \"acc_norm\": 0.3155963302752294,\n \"acc_norm_stderr\": 0.019926117513869666\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.029157522184605607,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.029157522184605607\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2696078431372549,\n \"acc_stderr\": 0.03114557065948678,\n \"acc_norm\": 0.2696078431372549,\n \"acc_norm_stderr\": 0.03114557065948678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.27848101265822783,\n \"acc_stderr\": 0.029178682304842548,\n \"acc_norm\": 0.27848101265822783,\n \"acc_norm_stderr\": 0.029178682304842548\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.1210762331838565,\n \"acc_stderr\": 0.021894174113185737,\n \"acc_norm\": 0.1210762331838565,\n \"acc_norm_stderr\": 0.021894174113185737\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2366412213740458,\n \"acc_stderr\": 0.03727673575596918,\n \"acc_norm\": 0.2366412213740458,\n \"acc_norm_stderr\": 0.03727673575596918\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3305785123966942,\n \"acc_stderr\": 0.04294340845212094,\n \"acc_norm\": 0.3305785123966942,\n \"acc_norm_stderr\": 0.04294340845212094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3067484662576687,\n \"acc_stderr\": 0.036230899157241474,\n \"acc_norm\": 0.3067484662576687,\n \"acc_norm_stderr\": 0.036230899157241474\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.24107142857142858,\n \"acc_stderr\": 0.04059867246952687,\n \"acc_norm\": 0.24107142857142858,\n \"acc_norm_stderr\": 0.04059867246952687\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.3592233009708738,\n \"acc_stderr\": 0.04750458399041692,\n \"acc_norm\": 0.3592233009708738,\n \"acc_norm_stderr\": 0.04750458399041692\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.24358974358974358,\n \"acc_stderr\": 0.028120966503914394,\n \"acc_norm\": 0.24358974358974358,\n \"acc_norm_stderr\": 0.028120966503914394\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.20434227330779056,\n \"acc_stderr\": 0.0144191239809319,\n \"acc_norm\": 0.20434227330779056,\n \"acc_norm_stderr\": 0.0144191239809319\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.29190751445086704,\n \"acc_stderr\": 0.024476994076247333,\n \"acc_norm\": 0.29190751445086704,\n \"acc_norm_stderr\": 0.024476994076247333\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.28938906752411575,\n \"acc_stderr\": 0.025755865922632938,\n \"acc_norm\": 0.28938906752411575,\n \"acc_norm_stderr\": 0.025755865922632938\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.023132376234543325,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.023132376234543325\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.25177304964539005,\n \"acc_stderr\": 0.025892151156709405,\n \"acc_norm\": 0.25177304964539005,\n \"acc_norm_stderr\": 0.025892151156709405\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24771838331160365,\n \"acc_stderr\": 0.011025499291443737,\n \"acc_norm\": 0.24771838331160365,\n \"acc_norm_stderr\": 0.011025499291443737\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.21323529411764705,\n \"acc_stderr\": 0.024880971512294285,\n \"acc_norm\": 0.21323529411764705,\n \"acc_norm_stderr\": 0.024880971512294285\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2434640522875817,\n \"acc_stderr\": 0.017362473762146627,\n \"acc_norm\": 0.2434640522875817,\n \"acc_norm_stderr\": 0.017362473762146627\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.20909090909090908,\n \"acc_stderr\": 0.038950910157241364,\n \"acc_norm\": 0.20909090909090908,\n \"acc_norm_stderr\": 0.038950910157241364\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.27755102040816326,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.27755102040816326,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.26865671641791045,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.24096385542168675,\n \"acc_stderr\": 0.0332939411907353,\n \"acc_norm\": 0.24096385542168675,\n \"acc_norm_stderr\": 0.0332939411907353\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.29239766081871343,\n \"acc_stderr\": 0.034886477134579215,\n \"acc_norm\": 0.29239766081871343,\n \"acc_norm_stderr\": 0.034886477134579215\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2178702570379437,\n \"mc1_stderr\": 0.014450846714123892,\n \"mc2\": 0.3905583201208923,\n \"mc2_stderr\": 0.014224369312263067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5556432517758485,\n \"acc_stderr\": 0.013965196769083553\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n 
}\n}\n```", "repo_url": "https://huggingface.co/LordNoah/Alpaca_spin_gpt2_e1_se0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-15-43.434636.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-15-43.434636.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-15-43.434636.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-15-43.434636.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-15-43.434636.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T02_15_43.434636", "path": ["**/details_harness|winogrande|5_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T02-15-43.434636.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T02_15_43.434636", "path": ["results_2024-01-23T02-15-43.434636.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T02-15-43.434636.parquet"]}]}]}
2024-01-23T02:17:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LordNoah/Alpaca_spin_gpt2_e1_se0 Dataset automatically created during the evaluation run of model LordNoah/Alpaca_spin_gpt2_e1_se0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T02:15:43.434636 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
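The "you can for instance do the following" sentence in the flattened card text above refers to a loading snippet that was dropped during flattening; for completeness, the call quoted in the card's `dataset_summary` metadata is reproduced below. Note that the config listing above only defines dated and "latest" splits, so `split="latest"` may be what actually resolves, even though the card's own example uses `split="train"`.

```python
from datasets import load_dataset

# Loader call as quoted in the card; "harness_winogrande_5" is one of the
# configs listed in the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_LordNoah__Alpaca_spin_gpt2_e1_se0",
    "harness_winogrande_5",
    split="train",  # the card says "train" tracks the latest results; the config listing defines "latest"
)
```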
[ "# Dataset Card for Evaluation run of LordNoah/Alpaca_spin_gpt2_e1_se0\n\n\n\nDataset automatically created during the evaluation run of model LordNoah/Alpaca_spin_gpt2_e1_se0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:15:43.434636(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LordNoah/Alpaca_spin_gpt2_e1_se0\n\n\n\nDataset automatically created during the evaluation run of model LordNoah/Alpaca_spin_gpt2_e1_se0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:15:43.434636(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3d9f171bfe47b3efc42b0f932b19ff14a012d1e6
# Dataset Card for Evaluation run of LordNoah/Alpaca_refine_gpt2_e1_se0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [LordNoah/Alpaca_refine_gpt2_e1_se0](https://huggingface.co/LordNoah/Alpaca_refine_gpt2_e1_se0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_LordNoah__Alpaca_refine_gpt2_e1_se0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T02:16:01.366997](https://huggingface.co/datasets/open-llm-leaderboard/details_LordNoah__Alpaca_refine_gpt2_e1_se0/blob/main/results_2024-01-23T02-16-01.366997.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26714671218496217, "acc_stderr": 0.031235793020424174, "acc_norm": 0.26841740481590964, "acc_norm_stderr": 0.03206394031126415, "mc1": 0.21542227662178703, "mc1_stderr": 0.014391902652427674, "mc2": 0.37277525939261963, "mc2_stderr": 0.014133518715389181 }, "harness|arc:challenge|25": { "acc": 0.26109215017064846, "acc_stderr": 0.012835523909473848, "acc_norm": 0.27303754266211605, "acc_norm_stderr": 0.013019332762635744 }, "harness|hellaswag|10": { "acc": 0.3663612826130253, "acc_stderr": 0.0048082512696824295, "acc_norm": 0.4538936466839275, "acc_norm_stderr": 0.004968521608065469 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03944624162501116, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501116 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2894736842105263, "acc_stderr": 0.03690677986137282, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.03690677986137282 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3320754716981132, "acc_stderr": 0.028985455652334388, "acc_norm": 0.3320754716981132, "acc_norm_stderr": 0.028985455652334388 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm":
0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2978723404255319, "acc_stderr": 0.029896145682095462, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.029896145682095462 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.038312260488503336, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.038312260488503336 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2804232804232804, "acc_stderr": 0.023135287974325635, "acc_norm": 0.2804232804232804, "acc_norm_stderr": 0.023135287974325635 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.035670166752768635, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.035670166752768635 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.024685979286239956, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.024685979286239956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.33497536945812806, "acc_stderr": 0.033208527423483104, "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.033208527423483104 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885415, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885415 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3484848484848485, "acc_stderr": 0.033948539651564025, "acc_norm": 0.3484848484848485, "acc_norm_stderr": 0.033948539651564025 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.29533678756476683, "acc_stderr": 0.03292296639155139, "acc_norm": 0.29533678756476683, "acc_norm_stderr": 0.03292296639155139 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396987, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396987 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514566, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514566 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.30642201834862387, "acc_stderr": 0.019765517220458523, "acc_norm": 0.30642201834862387, "acc_norm_stderr": 0.019765517220458523 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.029157522184605603, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605603 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.031660096793998116, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.031660096793998116 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.029312814153955917, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.029312814153955917 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.1210762331838565, "acc_stderr": 0.021894174113185737, "acc_norm": 0.1210762331838565, "acc_norm_stderr": 0.021894174113185737 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3140495867768595, "acc_stderr": 0.042369647530410184, "acc_norm": 0.3140495867768595, "acc_norm_stderr": 0.042369647530410184 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.36893203883495146, "acc_stderr": 0.047776151811567386, "acc_norm": 0.36893203883495146, "acc_norm_stderr": 0.047776151811567386 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23931623931623933, "acc_stderr": 0.027951826808924336, "acc_norm": 0.23931623931623933, "acc_norm_stderr": 0.027951826808924336 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.20051085568326948, "acc_stderr": 0.014317653708594206, "acc_norm": 0.20051085568326948, "acc_norm_stderr": 0.014317653708594206 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.28901734104046245, "acc_stderr": 0.024405173935783238, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.024405173935783238 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.02463004897982478, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.29260450160771706, "acc_stderr": 0.02583989833487798, "acc_norm": 0.29260450160771706, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24074074074074073, "acc_stderr": 0.023788583551658533, 
"acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.023788583551658533 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290396, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290396 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24445893089960888, "acc_stderr": 0.010976425013113897, "acc_norm": 0.24445893089960888, "acc_norm_stderr": 0.010976425013113897 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.19852941176470587, "acc_stderr": 0.024231013370541114, "acc_norm": 0.19852941176470587, "acc_norm_stderr": 0.024231013370541114 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.238562091503268, "acc_stderr": 0.017242385828779613, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.017242385828779613 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.038950910157241364, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.038950910157241364 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2163265306122449, "acc_stderr": 0.02635891633490403, "acc_norm": 0.2163265306122449, "acc_norm_stderr": 0.02635891633490403 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2736318407960199, "acc_stderr": 0.03152439186555401, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.03152439186555401 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-virology|5": { "acc": 0.21084337349397592, "acc_stderr": 0.031755547866299194, "acc_norm": 0.21084337349397592, "acc_norm_stderr": 0.031755547866299194 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.29239766081871343, "acc_stderr": 0.034886477134579215, "acc_norm": 0.29239766081871343, "acc_norm_stderr": 0.034886477134579215 }, "harness|truthfulqa:mc|0": { "mc1": 0.21542227662178703, "mc1_stderr": 0.014391902652427674, "mc2": 0.37277525939261963, "mc2_stderr": 0.014133518715389181 }, "harness|winogrande|5": { "acc": 0.5588003157063931, "acc_stderr": 0.013954975072834726 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_LordNoah__Alpaca_refine_gpt2_e1_se0
[ "region:us" ]
2024-01-23T02:17:19+00:00
{"pretty_name": "Evaluation run of LordNoah/Alpaca_refine_gpt2_e1_se0", "dataset_summary": "Dataset automatically created during the evaluation run of model [LordNoah/Alpaca_refine_gpt2_e1_se0](https://huggingface.co/LordNoah/Alpaca_refine_gpt2_e1_se0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LordNoah__Alpaca_refine_gpt2_e1_se0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T02:16:01.366997](https://huggingface.co/datasets/open-llm-leaderboard/details_LordNoah__Alpaca_refine_gpt2_e1_se0/blob/main/results_2024-01-23T02-16-01.366997.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26714671218496217,\n \"acc_stderr\": 0.031235793020424174,\n \"acc_norm\": 0.26841740481590964,\n \"acc_norm_stderr\": 0.03206394031126415,\n \"mc1\": 0.21542227662178703,\n \"mc1_stderr\": 0.014391902652427674,\n \"mc2\": 0.37277525939261963,\n \"mc2_stderr\": 0.014133518715389181\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.26109215017064846,\n \"acc_stderr\": 0.012835523909473848,\n \"acc_norm\": 0.27303754266211605,\n \"acc_norm_stderr\": 0.013019332762635744\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3663612826130253,\n \"acc_stderr\": 0.0048082512696824295,\n \"acc_norm\": 0.4538936466839275,\n \"acc_norm_stderr\": 0.004968521608065469\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.03944624162501116,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.03944624162501116\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.03690677986137282,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.03690677986137282\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3320754716981132,\n \"acc_stderr\": 0.028985455652334388,\n \"acc_norm\": 0.3320754716981132,\n \"acc_norm_stderr\": 0.028985455652334388\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.029896145682095462,\n \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.029896145682095462\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.30344827586206896,\n \"acc_stderr\": 0.038312260488503336,\n \"acc_norm\": 0.30344827586206896,\n \"acc_norm_stderr\": 0.038312260488503336\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2804232804232804,\n \"acc_stderr\": 0.023135287974325635,\n \"acc_norm\": 0.2804232804232804,\n \"acc_norm_stderr\": 0.023135287974325635\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1984126984126984,\n \"acc_stderr\": 0.035670166752768635,\n \"acc_norm\": 0.1984126984126984,\n \"acc_norm_stderr\": 0.035670166752768635\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n \"acc_stderr\": 0.024685979286239956,\n \"acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.024685979286239956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.33497536945812806,\n \"acc_stderr\": 0.033208527423483104,\n \"acc_norm\": 0.33497536945812806,\n \"acc_norm_stderr\": 0.033208527423483104\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885415,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885415\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3484848484848485,\n \"acc_stderr\": 0.033948539651564025,\n \"acc_norm\": 0.3484848484848485,\n \"acc_norm_stderr\": 0.033948539651564025\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.29533678756476683,\n \"acc_stderr\": 0.03292296639155139,\n \"acc_norm\": 
0.29533678756476683,\n \"acc_norm_stderr\": 0.03292296639155139\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.36153846153846153,\n \"acc_stderr\": 0.024359581465396987,\n \"acc_norm\": 0.36153846153846153,\n \"acc_norm_stderr\": 0.024359581465396987\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514566,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514566\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.03631329803969653,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969653\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.30642201834862387,\n \"acc_stderr\": 0.019765517220458523,\n \"acc_norm\": 0.30642201834862387,\n \"acc_norm_stderr\": 0.019765517220458523\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.029157522184605603,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.029157522184605603\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.031660096793998116,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.031660096793998116\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.28270042194092826,\n \"acc_stderr\": 0.029312814153955917,\n \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.029312814153955917\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.1210762331838565,\n \"acc_stderr\": 0.021894174113185737,\n \"acc_norm\": 0.1210762331838565,\n \"acc_norm_stderr\": 0.021894174113185737\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3140495867768595,\n \"acc_stderr\": 0.042369647530410184,\n \"acc_norm\": 0.3140495867768595,\n \"acc_norm_stderr\": 0.042369647530410184\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3067484662576687,\n \"acc_stderr\": 0.036230899157241474,\n \"acc_norm\": 0.3067484662576687,\n \"acc_norm_stderr\": 0.036230899157241474\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.36893203883495146,\n \"acc_stderr\": 0.047776151811567386,\n \"acc_norm\": 0.36893203883495146,\n \"acc_norm_stderr\": 0.047776151811567386\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23931623931623933,\n \"acc_stderr\": 0.027951826808924336,\n \"acc_norm\": 0.23931623931623933,\n \"acc_norm_stderr\": 0.027951826808924336\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 
0.040201512610368445\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.20051085568326948,\n \"acc_stderr\": 0.014317653708594206,\n \"acc_norm\": 0.20051085568326948,\n \"acc_norm_stderr\": 0.014317653708594206\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.28901734104046245,\n \"acc_stderr\": 0.024405173935783238,\n \"acc_norm\": 0.28901734104046245,\n \"acc_norm_stderr\": 0.024405173935783238\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.29260450160771706,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.29260450160771706,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.023788583551658533,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.023788583551658533\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290396,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290396\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24445893089960888,\n \"acc_stderr\": 0.010976425013113897,\n \"acc_norm\": 0.24445893089960888,\n \"acc_norm_stderr\": 0.010976425013113897\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.19852941176470587,\n \"acc_stderr\": 0.024231013370541114,\n \"acc_norm\": 0.19852941176470587,\n \"acc_norm_stderr\": 0.024231013370541114\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.017242385828779613,\n \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.017242385828779613\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.20909090909090908,\n \"acc_stderr\": 0.038950910157241364,\n \"acc_norm\": 0.20909090909090908,\n \"acc_norm_stderr\": 0.038950910157241364\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2163265306122449,\n \"acc_stderr\": 0.02635891633490403,\n \"acc_norm\": 0.2163265306122449,\n \"acc_norm_stderr\": 0.02635891633490403\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2736318407960199,\n \"acc_stderr\": 0.03152439186555401,\n \"acc_norm\": 0.2736318407960199,\n \"acc_norm_stderr\": 0.03152439186555401\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.21084337349397592,\n \"acc_stderr\": 0.031755547866299194,\n \"acc_norm\": 0.21084337349397592,\n \"acc_norm_stderr\": 0.031755547866299194\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.29239766081871343,\n \"acc_stderr\": 0.034886477134579215,\n \"acc_norm\": 0.29239766081871343,\n \"acc_norm_stderr\": 0.034886477134579215\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.21542227662178703,\n \"mc1_stderr\": 0.014391902652427674,\n \"mc2\": 0.37277525939261963,\n \"mc2_stderr\": 0.014133518715389181\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5588003157063931,\n \"acc_stderr\": 
0.013954975072834726\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/LordNoah/Alpaca_refine_gpt2_e1_se0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-16-01.366997.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-16-01.366997.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-16-01.366997.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T02-16-01.366997.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T02-16-01.366997.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["**/details_harness|winogrande|5_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T02-16-01.366997.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T02_16_01.366997", "path": ["results_2024-01-23T02-16-01.366997.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T02-16-01.366997.parquet"]}]}]}
2024-01-23T02:17:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LordNoah/Alpaca_refine_gpt2_e1_se0 Dataset automatically created during the evaluation run of model LordNoah/Alpaca_refine_gpt2_e1_se0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T02:16:01.366997 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of LordNoah/Alpaca_refine_gpt2_e1_se0\n\n\n\nDataset automatically created during the evaluation run of model LordNoah/Alpaca_refine_gpt2_e1_se0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:16:01.366997(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LordNoah/Alpaca_refine_gpt2_e1_se0\n\n\n\nDataset automatically created during the evaluation run of model LordNoah/Alpaca_refine_gpt2_e1_se0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T02:16:01.366997(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
fcff0f09c1fb108963c38c9614b960aa72f3938e
# Dataset Card for "ultrafeedback_binarized_relabelled_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/ultrafeedback_binarized_relabelled_test
[ "region:us" ]
2024-01-23T02:19:49+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "prompt_id", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "score_chosen", "dtype": "float64"}, {"name": "score_rejected", "dtype": "float64"}, {"name": "reward_chosen", "dtype": "float64"}, {"name": "reward_rejected", "dtype": "float64"}], "splits": [{"name": "train_prefs", "num_bytes": 224562, "num_examples": 32}], "download_size": 146530, "dataset_size": 224562}, "configs": [{"config_name": "default", "data_files": [{"split": "train_prefs", "path": "data/train_prefs-*"}]}]}
2024-01-23T02:19:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "ultrafeedback_binarized_relabelled_test" More Information needed
[ "# Dataset Card for \"ultrafeedback_binarized_relabelled_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"ultrafeedback_binarized_relabelled_test\"\n\nMore Information needed" ]
1213f9157059473e2aa59e3adb7eaf45171002ec
# ASPED: An Audio Dataset for Detecting Pedestrians This repo contains the data for the ASPED dataset, presented at ICASSP 2024. - [Paper Link](https://arxiv.org/abs/2309.06531), [Project Homepage](https://urbanaudiosensing.github.io/ASPED.html) - Pavan Seshadri, Chaeyeon Han, Bon-Woo Koo, Noah Posner, Suhbrajit Guhathakurta, Alexander Lerch ## Usage This dataset contains audio and video recordings of pedestrian activity collected at various locations in and around Georgia Tech. Labels of pedestrian counts per each second of audio/video are provided as well, calculated via a computer vision model (Mask2Former trained on msft-coco) using the video recordings. ### Access It is recommended to use the huggingface_hub library to download the dataset from this location. [Info on downloading with huggingface_hub](https://huggingface.co/docs/huggingface_hub/guides/download). Downloading the entire dataset can be done with the following code: ``` from huggingface_hub import snapshot_download snapshot_download(repo_id="pseshadri9/ASPED", repo_type="dataset") ``` Alternatively if you would like to download only the audio or video, pass the ignore_patterns flag to snapshot_download to avoid downloading the entire set. **Audio Only** ``` from huggingface_hub import snapshot_download snapshot_download(repo_id="pseshadri9/ASPED", repo_type="dataset", ignore_patterns="*.mp4") ``` **Video Only** ``` from huggingface_hub import snapshot_download snapshot_download(repo_id="pseshadri9/ASPED", repo_type="dataset", ignore_patterns="*.flac") ``` ## Citation ``` @inproceedings{Seshadri24, title={ASPED: An Audio Dataset for Detecting Pedestrians}, author={Seshadri, Pavan and Han, Chaeyeon and Koo, Bon-Woo and Posner, Noah and Guhathakurta, Suhbrajit and Lerch, Alexander}, booktitle={Proc. of ICASSP 2024}, pages={1--5}, year={2024}, organization={IEEE} } ```
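As a small follow-up to the download commands above (this sketch is not part of the ASPED release and only assumes that the audio files carry the `.flac` extension implied by the `ignore_patterns="*.flac"` filter), one way to locate and inspect a downloaded recording:

```python
from pathlib import Path

import soundfile as sf  # assumption: any FLAC-capable audio I/O library works here
from huggingface_hub import snapshot_download

# Download only the audio, as in the card above.
local_root = snapshot_download(
    repo_id="pseshadri9/ASPED",
    repo_type="dataset",
    ignore_patterns="*.mp4",
)

# No particular folder layout is assumed; just search for FLAC files.
flac_files = sorted(Path(local_root).rglob("*.flac"))
print(f"Found {len(flac_files)} audio files")

if flac_files:
    audio, sample_rate = sf.read(flac_files[0])
    print(flac_files[0].name, audio.shape, sample_rate)
```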
pseshadri9/ASPED
[ "task_categories:audio-classification", "size_categories:n>1T", "license:cc-by-4.0", "pedestrian detection", "arxiv:2309.06531", "region:us" ]
2024-01-23T02:48:23+00:00
{"license": "cc-by-4.0", "size_categories": ["n>1T"], "task_categories": ["audio-classification"], "tags": ["pedestrian detection"]}
2024-01-23T17:00:45+00:00
[ "2309.06531" ]
[]
TAGS #task_categories-audio-classification #size_categories-n>1T #license-cc-by-4.0 #pedestrian detection #arxiv-2309.06531 #region-us
# ASPED: An Audio Dataset for Detecting Pedestrians This repo contains the data for the ASPED dataset, presented at ICASSP 2024. - Paper Link, Project Homepage - Pavan Seshadri, Chaeyeon Han, Bon-Woo Koo, Noah Posner, Suhbrajit Guhathakurta, Alexander Lerch ## Usage This dataset contains audio and video recordings of pedestrian activity collected at various locations in and around Georgia Tech. Labels of pedestrian counts per each second of audio/video are provided as well, calculated via a computer vision model (Mask2Former trained on msft-coco) using the video recordings. ### Access It is recommended to use the huggingface_hub library to download the dataset from this location. Info on downloading with huggingface_hub. Downloading the entire dataset can be done with the following code: Alternatively if you would like to download only the audio or video, pass the ignore_patterns flag to snapshot_download to avoid downloading the entire set. Audio Only Video Only
[ "# ASPED: An Audio Dataset for Detecting Pedestrians\n\nThis repo contains the data for the ASPED dataset, presented at ICASSP 2024.\n- Paper Link, Project Homepage\n\n- Pavan Seshadri, Chaeyeon Han, Bon-Woo Koo, Noah Posner, Suhbrajit Guhathakurta, Alexander Lerch", "## Usage\nThis dataset contains audio and video recordings of pedestrian activity collected at various locations in and around Georgia Tech. \n\nLabels of pedestrian counts per each second of audio/video are provided as well, calculated via a computer vision model (Mask2Former trained on msft-coco) using the video recordings.", "### Access\nIt is recommended to use the huggingface_hub library to download the dataset from this location. Info on downloading with huggingface_hub.\n\nDownloading the entire dataset can be done with the following code:\n\nAlternatively if you would like to download only the audio or video, pass the ignore_patterns flag to snapshot_download to avoid downloading the entire set. \n\nAudio Only\n\n\nVideo Only" ]
[ "TAGS\n#task_categories-audio-classification #size_categories-n>1T #license-cc-by-4.0 #pedestrian detection #arxiv-2309.06531 #region-us \n", "# ASPED: An Audio Dataset for Detecting Pedestrians\n\nThis repo contains the data for the ASPED dataset, presented at ICASSP 2024.\n- Paper Link, Project Homepage\n\n- Pavan Seshadri, Chaeyeon Han, Bon-Woo Koo, Noah Posner, Suhbrajit Guhathakurta, Alexander Lerch", "## Usage\nThis dataset contains audio and video recordings of pedestrian activity collected at various locations in and around Georgia Tech. \n\nLabels of pedestrian counts per each second of audio/video are provided as well, calculated via a computer vision model (Mask2Former trained on msft-coco) using the video recordings.", "### Access\nIt is recommended to use the huggingface_hub library to download the dataset from this location. Info on downloading with huggingface_hub.\n\nDownloading the entire dataset can be done with the following code:\n\nAlternatively if you would like to download only the audio or video, pass the ignore_patterns flag to snapshot_download to avoid downloading the entire set. \n\nAudio Only\n\n\nVideo Only" ]
6e9b5f47025706bb360f23be16ac5e29288a50da
纯手工用眼睛和手细细切做臊子的中文长文本语料 下载:`$env:HF_ENDPOINT="https://hf-mirror.com"; python -c "from huggingface_hub import snapshot_download; snapshot_download(repo_id='Limour/b-corpus', repo_type='dataset', local_dir=r'D:\datasets\tmp')"` 1. 清洗要求:`全角转半角` | `繁体转简体` 2. 内部去重:`s/(.)\1{3,}/$1$1$1/g` | `s/(.{2,}?)\1{2,}/$1/g` | `s/(((^.*$)[\r\n]*){1,10}?)\1{1,}/$1/g` 3. 杂项:`s/^([\x00-\x3e\x40-\xff]{1,4})[\x00-\xff]*:/$1:/g` 4. 错字:`s/巴巴/爸爸/g` | `s/阿阿+/啊啊/g` | `s/很抄/很吵/g` | `s/能苟/能够/g` 5. 错字:`s/拉\b/啦/g` | `s/巴\b/吧/g` | `s/阿\b/啊/g` 6. 一个完整对话为一个文件 7. 一行的格式为 `{NAME}:{DIALOGUE}`(':'为中文冒号) 8. 旁白的 {NAME} 为 `旁白` 9. 未知人物的 {NAME} 为 `?` 10. 可以从旁白推断的主角的 {NAME} 为 `我/名字`,否则为 `名字` 11. 如万华镜等主角名字改变的,`名字` 变,`我/` 不变 12. 除 `b-corpus\视觉小说\format` 外的语料的作用是增加多样性 13. 完整保留涩涩内容,部分内容涉及*错误世界观和道德伦理* 14. 注意:部分多视角的语料,随着旁白的改变,主角也可能会改变 15. `b-corpus\v-corpus-en` 来自 [alpindale](https://huggingface.co/alpindale)/[visual-novels](https://huggingface.co/datasets/alpindale/visual-novels), 或许可以翻译成中文? 16. 将数据按 `制作会社\作品名` 进行了整理,并修复了一些小错误,保存在 `v-corpus-zh` 目录下 ```python from opencc import OpenCC cc = OpenCC('t2s') # 't2s'表示繁体转简体 import unicodedata def fullwidth_to_halfwidth(input_str): return ''.join((unicodedata.normalize('NFKC', char) for char in input_str)) def clearT(s): s = cc.convert(fullwidth_to_halfwidth(s)) return s.strip() ``` + 错误价值观举例 ```txt 旁白:她抵达了终之空。 旁白:她已经超越了万物.... 旁白:超越万物.... 旁白:也就是, 旁白:抵达极致.... 女信徒A:野崎跳下去了! 女信徒A:真棒。我们跟上吧!趁俗世的权利还没阻止我们抵达极致!! 女信徒A:救世主大人 女信徒A:我先走一步 我/卓司:嗯 女性信者B:我也走了 女信徒C:我也....各位,保重 男信徒A:我也要....跟这不完美的世界说再见了。各位,再见.... 由香:在完美的世界里,我们也要在一起, ?:嗯—— 由香:再见,救世主大人。非常感谢 我/卓司:嗯.... 男信徒B:这样就能跟这个世界说再见了....我讨厌这个世界 男信徒B:以后就能在完美的世界里—— 男信徒B:在完美的世界里,过上幸福的日子!! 男信徒B:没有家人 男信徒B:没有老师 男信徒B:没有考试 男信徒B:也没有学校 男信徒C:完美的世界,我来了!! 旁白:....信徒们一个个地抵达终之空.... ``` ```txt 我/卓司:真的会有祝福自己诞生的人吗? 我/卓司:正因为诅咒一切,在这个世界诞生,因为一切都是谬误,我们才—— 行人:是啊,没错,是这样。如果诞生是惩罚的话,我们在诞生的瞬间就是丧家犬了.... 我/卓司:那为什么—— 行人:所以就要勒紧刚诞生的婴儿的脖子? 旁白:我有些惊讶。 旁白:“掐住新生婴儿的脖子,将其人生在10分钟结束,谁也不会有异议” 旁白:虽然这的确是我说的,但那是在方舟的演说上说的。我不认为行人会知道。 旁白:这样的话。 我/卓司:我们在思考同样的事? 行人:谁知道呢?但我能断言,你的所作所为是错误的 旁白:即使如此,也要断定我是错的。 旁白:虽然很想听听他的理由,但若他是最后一人,也就没必要听了。 旁白:因为这是我在思考的事,而水上行人与我的判断完全相反—— 我/卓司:被诅咒的,生 我/卓司:被祝福的,生 我/卓司:亦或者是—— 我/卓司:被当做诅咒的,死 我/卓司:把这样的死,当做祝福接受 我/卓司:到这里为止,我和你的想法应该是一样的吧? ``` + 涩涩内容举例 ```txt 我/块斗:「哪个才是本体啊!?」 月咏:「都是本体哦。这是将自身的存在多次复制的结果」 月咏:「现技术已经支持了,只要大脑适应,就能复制多个自己,并随意行动」 月咏:「就好比自己的神经伸展到外部了的感觉」 我/块斗:「嚯嚯......」 旁白:我也能做到这样的吗......? 月咏:「我一开始就是完全适应的状态......所以这种事也能做到」 旁白:齐刷刷走来的月咏大军将我围住,并把我推倒。 我/块斗:「哇......怎么了?」 月咏:「......在这里做爱的话,就不会对身体有负担了」 月咏:「但是,这样会将大脑的感受更直接的引导出来」 我/块斗:「......也就是说?」 月咏:「......非常抱歉,主人」 月咏:「我......大概是个相当色情的女孩子」 我/块斗:「放心吧。我对你的感情绝对不比你差」 月咏:「好开心......请让我好好侍奉您一番吧」 旁白:两侧的月咏靠近过来,并将她们的嘴唇贴了上来。 月咏1:「嗯......呼......啾......今天,请尽情享受吧......啾......」 旁白:站在身旁的月咏吸住我的嘴唇。 旁白:而且还抓住我的手往自己胸上压。 月咏1:「哈噗......啾、嗯嗯......啊哈......主人,你知道,啾......我现在心脏砰砰跳个不停吗......?」 月咏2:「要这样说,我的心脏现在也......扑通扑通地跳......主人......嗯嗯、请你确认一下......」 旁白:位于另一侧的月咏则是把我的手放在私处。 旁白:指尖有种湿润的触感。 月咏2:「啊......嗯、嗯......主人的手指......啊、啊......我的阴道里面,躁动不停......嗯嗯......好舒服......」 月咏1:「也请尽情摸我的胸部......嗯、啾、嗯溜噜......溜噜、啾......!」 旁白:像是为了对抗让我触摸秘部的月咏,另一个月咏吸住了我的舌头。 我/块斗:「啾......哈啊、哈啊......呜喔喔!?」 月咏3:「呼......嗯、嗯......呼呼、主人,舒服吗......?」 旁白:往身下看去,发现第三个月咏用胸部夹住了我的股间。 ```
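The Perl-style substitutions listed in cleaning step 2 above translate directly to Python. A minimal sketch of that internal de-duplication pass (my own rendering for illustration, not part of the original corpus tooling):

```python
import re

def dedup(text: str) -> str:
    # s/(.)\1{3,}/$1$1$1/g  -- collapse a character repeated 4+ times down to 3
    text = re.sub(r'(.)\1{3,}', r'\1\1\1', text)
    # s/(.{2,}?)\1{2,}/$1/g  -- keep one copy of a short phrase repeated 3+ times in a row
    text = re.sub(r'(.{2,}?)\1{2,}', r'\1', text)
    # s/(((^.*$)[\r\n]*){1,10}?)\1{1,}/$1/g  -- keep one copy of a repeated block of up to 10 lines
    text = re.sub(r'(((^.*$)[\r\n]*){1,10}?)\1{1,}', r'\1', text, flags=re.M)
    return text
```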
Limour/b-corpus
[ "task_categories:text-generation", "language:zh", "license:cc-by-nc-sa-4.0", "not-for-all-audiences", "region:us" ]
2024-01-23T03:48:54+00:00
{"language": ["zh"], "license": "cc-by-nc-sa-4.0", "task_categories": ["text-generation"], "tags": ["not-for-all-audiences"]}
2024-02-02T11:32:44+00:00
[]
[ "zh" ]
TAGS #task_categories-text-generation #language-Chinese #license-cc-by-nc-sa-4.0 #not-for-all-audiences #region-us
纯手工用眼睛和手细细切做臊子的中文长文本语料 下载:'$env:HF_ENDPOINT="URL"; python -c "from huggingface_hub import snapshot_download; snapshot_download(repo_id='Limour/b-corpus', repo_type='dataset', local_dir=r'D:\datasets\tmp')"' 1. 清洗要求:'全角转半角' | '繁体转简体' 2. 内部去重:'s/(.)\1{3,}/$1$1$1/g' | 's/(.{2,}?)\1{2,}/$1/g' | 's/(((^.*$)[\r\n]*){1,10}?)\1{1,}/$1/g' 3. 杂项:'s/^([\x00-\x3e\x40-\xff]{1,4})[\x00-\xff]*:/$1:/g' 4. 错字:'s/巴巴/爸爸/g' | 's/阿阿+/啊啊/g' | 's/很抄/很吵/g' | 's/能苟/能够/g' 5. 错字:'s/拉\b/啦/g' | 's/巴\b/吧/g' | 's/阿\b/啊/g' 6. 一个完整对话为一个文件 7. 一行的格式为 '{NAME}:{DIALOGUE}'(':'为中文冒号) 8. 旁白的 {NAME} 为 '旁白' 9. 未知人物的 {NAME} 为 '?' 10. 可以从旁白推断的主角的 {NAME} 为 '我/名字',否则为 '名字' 11. 如万华镜等主角名字改变的,'名字' 变,'我/' 不变 12. 除 'b-corpus\视觉小说\format' 外的语料的作用是增加多样性 13. 完整保留涩涩内容,部分内容涉及*错误世界观和道德伦理* 14. 注意:部分多视角的语料,随着旁白的改变,主角也可能会改变 15. 'b-corpus\v-corpus-en' 来自 alpindale/visual-novels, 或许可以翻译成中文? 16. 将数据按 '制作会社\作品名' 进行了整理,并修复了一些小错误,保存在 'v-corpus-zh' 目录下 + 错误价值观举例 + 涩涩内容举例
[]
[ "TAGS\n#task_categories-text-generation #language-Chinese #license-cc-by-nc-sa-4.0 #not-for-all-audiences #region-us \n" ]
f6009cfd897201550267c0d23ce339b04f6aabbc
Create a Canadian flag using this information. The flag could incorporate elements like hands holding a waving flag, symbolizing unity and strength. Additionally, vibrant colors could represent the diversity and resilience of the Canadian people. The flag might feature symbols such as doves or hearts to represent love and peace. The colors could be chosen to evoke a sense of depth and emotion, reflecting the complex themes found in the song. 2
chalil/image
[ "region:us" ]
2024-01-23T03:56:22+00:00
{}
2024-01-23T03:56:55+00:00
[]
[]
TAGS #region-us
Create a Canadian flag using this information. The flag could incorporate elements like hands holding a waving flag, symbolizing unity and strength. Additionally, vibrant colors could represent the diversity and resilience of the Canadian people. The flag might feature symbols such as doves or hearts to represent love and peace. The colors could be chosen to evoke a sense of depth and emotion, reflecting the complex themes found in the song. 2
[]
[ "TAGS\n#region-us \n" ]
b0c674b8a898fc213cb908494d456352bbb10468
📚 Dataset Introduction · This dataset is constructed with generated data and contains a total of 1000 human face images with a resolution of 1024x1024. · The filenames are the corresponding labels for the images, formatted as "Number"-"Gender"-"Age". · The dataset includes 500 images of each gender, distributed across different age groups (1-10, 11-20, 21-30, 31-40, over 40) with 100 images for each group. This dataset is a sample of 1000 images from the full collection. For more data inquiries, feel free to contact us 😄 🤖 About Us: We are developers from China, dedicated to accelerating AI development with high-quality data. 📮 Contact information: [email protected]
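Given the "Number"-"Gender"-"Age" filename convention described above, a label can be recovered by parsing the filename. The sketch below is illustrative only: the separator, the spelling of the gender field, and the image extension in the example are assumptions, since the card specifies the pattern but not the exact encoding.

```python
from pathlib import Path

def parse_label(path: str) -> dict:
    """Parse a filename that follows the "Number"-"Gender"-"Age" convention."""
    number, gender, age = Path(path).stem.split("-", maxsplit=2)
    return {"number": int(number), "gender": gender, "age": int(age)}

# Hypothetical example filename -- the real naming may differ:
# parse_label("0042-Female-27.png")  ->  {"number": 42, "gender": "Female", "age": 27}
```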
JoinDatawithme/Humanface_of_various_age_groups
[ "size_categories:100K<n<1M", "language:en", "license:apache-2.0", "face", "humanface", "ml", "face recognition", "Generated data", "dataset", "region:us" ]
2024-01-23T03:56:35+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["100K<n<1M"], "tags": ["face", "humanface", "ml", "face recognition", "Generated data", "dataset"]}
2024-01-23T04:33:16+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #license-apache-2.0 #face #humanface #ml #face recognition #Generated data #dataset #region-us
Dataset Introduction · This dataset is constructed with generated data and contains a total of 1000 human face images with a resolution of 1024x1024. · The filenames are the corresponding labels for the images, formatted as "Number"-"Gender"-"Age". · The dataset includes 500 images of each gender, distributed across different age groups (1-10, 11-20, 21-30, 31-40, over 40) with 100 images for each group. This dataset is a sample of 1000 images from the full collection. For more data inquiries, feel free to contact us About Us: We are developers from China, dedicated to accelerating AI development with high-quality data. Contact information: huawuque@URL
[]
[ "TAGS\n#size_categories-100K<n<1M #language-English #license-apache-2.0 #face #humanface #ml #face recognition #Generated data #dataset #region-us \n" ]
ca173948484bdb2ced9a7a817e16cfdd1245b7a2
# Dataset Card for Evaluation run of ibivibiv/strix-rufipes-70b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ibivibiv/strix-rufipes-70b](https://huggingface.co/ibivibiv/strix-rufipes-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T03:56:38.126146](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b/blob/main/results_2024-01-23T03-56-38.126146.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6910894247381432, "acc_stderr": 0.03074765419800289, "acc_norm": 0.6948907257668793, "acc_norm_stderr": 0.03135484817423871, "mc1": 0.40024479804161567, "mc1_stderr": 0.01715160555574914, "mc2": 0.5672072620719157, "mc2_stderr": 0.014748654287331589 }, "harness|arc:challenge|25": { "acc": 0.674061433447099, "acc_stderr": 0.013697432466693246, "acc_norm": 0.7133105802047781, "acc_norm_stderr": 0.013214986329274777 }, "harness|hellaswag|10": { "acc": 0.6898028281218881, "acc_stderr": 0.004616288245259753, "acc_norm": 0.8786098386775543, "acc_norm_stderr": 0.003259127057668171 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742399, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7960526315789473, "acc_stderr": 0.032790004063100515, "acc_norm": 0.7960526315789473, "acc_norm_stderr": 0.032790004063100515 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8125, "acc_stderr": 0.032639560491693344, "acc_norm": 0.8125, "acc_norm_stderr": 0.032639560491693344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6851063829787234, "acc_stderr": 0.03036358219723817, "acc_norm": 0.6851063829787234, "acc_norm_stderr": 0.03036358219723817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.0470070803355104, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.0470070803355104 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.025379524910778387, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.025379524910778387 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8064516129032258, "acc_stderr": 0.022475258525536057, "acc_norm": 0.8064516129032258, "acc_norm_stderr": 0.022475258525536057 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.02845038880528437, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.02845038880528437 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8787878787878788, "acc_stderr": 0.023253157951942088, "acc_norm": 0.8787878787878788, "acc_norm_stderr": 0.023253157951942088 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.018088393839078912, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.018088393839078912 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.717948717948718, "acc_stderr": 0.0228158130988966, "acc_norm": 0.717948717948718, "acc_norm_stderr": 0.0228158130988966 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871923, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871923 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7521008403361344, "acc_stderr": 0.028047967224176892, "acc_norm": 0.7521008403361344, "acc_norm_stderr": 0.028047967224176892 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.48344370860927155, "acc_stderr": 
0.040802441856289715, "acc_norm": 0.48344370860927155, "acc_norm_stderr": 0.040802441856289715 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8788990825688073, "acc_stderr": 0.013987618292389713, "acc_norm": 0.8788990825688073, "acc_norm_stderr": 0.013987618292389713 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9019607843137255, "acc_stderr": 0.0208711184555521, "acc_norm": 0.9019607843137255, "acc_norm_stderr": 0.0208711184555521 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8776371308016878, "acc_stderr": 0.02133174182974679, "acc_norm": 0.8776371308016878, "acc_norm_stderr": 0.02133174182974679 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7802690582959642, "acc_stderr": 0.02779017706438359, "acc_norm": 0.7802690582959642, "acc_norm_stderr": 0.02779017706438359 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476075, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476075 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.03092278832044578, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.03092278832044578 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5357142857142857, "acc_stderr": 0.04733667890053756, "acc_norm": 0.5357142857142857, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.034926064766237906, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.034926064766237906 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.019875655027867464, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.019875655027867464 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8569604086845466, "acc_stderr": 0.012520023176796534, "acc_norm": 0.8569604086845466, "acc_norm_stderr": 0.012520023176796534 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7687861271676301, "acc_stderr": 0.02269865716785571, "acc_norm": 0.7687861271676301, "acc_norm_stderr": 0.02269865716785571 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5184357541899441, "acc_stderr": 0.01671113049778282, "acc_norm": 0.5184357541899441, "acc_norm_stderr": 0.01671113049778282 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7679738562091504, "acc_stderr": 0.024170840879340873, "acc_norm": 0.7679738562091504, "acc_norm_stderr": 0.024170840879340873 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7620578778135049, "acc_stderr": 0.024185150647818707, "acc_norm": 0.7620578778135049, "acc_norm_stderr": 0.024185150647818707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157375, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157375 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5390070921985816, "acc_stderr": 0.02973659252642444, "acc_norm": 0.5390070921985816, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5743155149934811, "acc_stderr": 0.01262839355181194, "acc_norm": 0.5743155149934811, "acc_norm_stderr": 0.01262839355181194 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6911764705882353, "acc_stderr": 0.028064998167040094, "acc_norm": 0.6911764705882353, "acc_norm_stderr": 0.028064998167040094 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7565359477124183, "acc_stderr": 0.01736247376214662, "acc_norm": 0.7565359477124183, "acc_norm_stderr": 0.01736247376214662 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940588, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940588 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827054, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827054 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8596491228070176, "acc_stderr": 0.0266405825391332, "acc_norm": 0.8596491228070176, "acc_norm_stderr": 0.0266405825391332 }, "harness|truthfulqa:mc|0": { "mc1": 0.40024479804161567, "mc1_stderr": 0.01715160555574914, "mc2": 0.5672072620719157, "mc2_stderr": 0.014748654287331589 }, "harness|winogrande|5": { "acc": 0.8476716653512234, "acc_stderr": 0.010099208246065614 }, "harness|gsm8k|5": { "acc": 0.5382865807429871, "acc_stderr": 0.01373204822701668 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
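Beyond the per-task configuration shown in the load example earlier in this card, the aggregated metrics for this run live in the "results" configuration. A minimal sketch for pulling them (the "results" config name and "latest" split follow the naming pattern these leaderboard detail repos use; treat them as assumptions if the repo layout changes):

```python
from datasets import load_dataset

# Aggregated metrics for the latest run of this evaluation.
aggregated = load_dataset(
    "open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b",
    "results",
    split="latest",
)
print(aggregated[0])
```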
open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b
[ "region:us" ]
2024-01-23T03:58:59+00:00
{"pretty_name": "Evaluation run of ibivibiv/strix-rufipes-70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ibivibiv/strix-rufipes-70b](https://huggingface.co/ibivibiv/strix-rufipes-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T03:56:38.126146](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__strix-rufipes-70b/blob/main/results_2024-01-23T03-56-38.126146.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6910894247381432,\n \"acc_stderr\": 0.03074765419800289,\n \"acc_norm\": 0.6948907257668793,\n \"acc_norm_stderr\": 0.03135484817423871,\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.01715160555574914,\n \"mc2\": 0.5672072620719157,\n \"mc2_stderr\": 0.014748654287331589\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.674061433447099,\n \"acc_stderr\": 0.013697432466693246,\n \"acc_norm\": 0.7133105802047781,\n \"acc_norm_stderr\": 0.013214986329274777\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6898028281218881,\n \"acc_stderr\": 0.004616288245259753,\n \"acc_norm\": 0.8786098386775543,\n \"acc_norm_stderr\": 0.003259127057668171\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742399,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742399\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7960526315789473,\n \"acc_stderr\": 0.032790004063100515,\n \"acc_norm\": 0.7960526315789473,\n \"acc_norm_stderr\": 0.032790004063100515\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.032639560491693344,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.032639560491693344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n 
\"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6851063829787234,\n \"acc_stderr\": 0.03036358219723817,\n \"acc_norm\": 0.6851063829787234,\n \"acc_norm_stderr\": 0.03036358219723817\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.0470070803355104,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.0470070803355104\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778387,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778387\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8064516129032258,\n \"acc_stderr\": 0.022475258525536057,\n \"acc_norm\": 0.8064516129032258,\n \"acc_norm_stderr\": 0.022475258525536057\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.02845038880528437,\n \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.02845038880528437\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8787878787878788,\n \"acc_stderr\": 0.023253157951942088,\n \"acc_norm\": 0.8787878787878788,\n \"acc_norm_stderr\": 0.023253157951942088\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.018088393839078912,\n \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 0.018088393839078912\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.717948717948718,\n \"acc_stderr\": 0.0228158130988966,\n \"acc_norm\": 0.717948717948718,\n \"acc_norm_stderr\": 0.0228158130988966\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.027840811495871923,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.027840811495871923\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7521008403361344,\n \"acc_stderr\": 0.028047967224176892,\n \"acc_norm\": 0.7521008403361344,\n \"acc_norm_stderr\": 0.028047967224176892\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.48344370860927155,\n \"acc_stderr\": 0.040802441856289715,\n \"acc_norm\": 0.48344370860927155,\n \"acc_norm_stderr\": 0.040802441856289715\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8788990825688073,\n \"acc_stderr\": 0.013987618292389713,\n \"acc_norm\": 0.8788990825688073,\n \"acc_norm_stderr\": 0.013987618292389713\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9019607843137255,\n \"acc_stderr\": 0.0208711184555521,\n \"acc_norm\": 0.9019607843137255,\n \"acc_norm_stderr\": 0.0208711184555521\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8776371308016878,\n \"acc_stderr\": 0.02133174182974679,\n \"acc_norm\": 0.8776371308016878,\n \"acc_norm_stderr\": 0.02133174182974679\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7802690582959642,\n \"acc_stderr\": 0.02779017706438359,\n \"acc_norm\": 0.7802690582959642,\n \"acc_norm_stderr\": 0.02779017706438359\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476075,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476075\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.03092278832044578,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.03092278832044578\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5357142857142857,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.5357142857142857,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.034926064766237906,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.034926064766237906\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.019875655027867464,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.019875655027867464\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8569604086845466,\n \"acc_stderr\": 0.012520023176796534,\n \"acc_norm\": 
0.8569604086845466,\n \"acc_norm_stderr\": 0.012520023176796534\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7687861271676301,\n \"acc_stderr\": 0.02269865716785571,\n \"acc_norm\": 0.7687861271676301,\n \"acc_norm_stderr\": 0.02269865716785571\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5184357541899441,\n \"acc_stderr\": 0.01671113049778282,\n \"acc_norm\": 0.5184357541899441,\n \"acc_norm_stderr\": 0.01671113049778282\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7679738562091504,\n \"acc_stderr\": 0.024170840879340873,\n \"acc_norm\": 0.7679738562091504,\n \"acc_norm_stderr\": 0.024170840879340873\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7620578778135049,\n \"acc_stderr\": 0.024185150647818707,\n \"acc_norm\": 0.7620578778135049,\n \"acc_norm_stderr\": 0.024185150647818707\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157375,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157375\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5390070921985816,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.5390070921985816,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5743155149934811,\n \"acc_stderr\": 0.01262839355181194,\n \"acc_norm\": 0.5743155149934811,\n \"acc_norm_stderr\": 0.01262839355181194\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6911764705882353,\n \"acc_stderr\": 0.028064998167040094,\n \"acc_norm\": 0.6911764705882353,\n \"acc_norm_stderr\": 0.028064998167040094\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7565359477124183,\n \"acc_stderr\": 0.01736247376214662,\n \"acc_norm\": 0.7565359477124183,\n \"acc_norm_stderr\": 0.01736247376214662\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940588,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940588\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827054,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827054\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352202,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352202\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8596491228070176,\n \"acc_stderr\": 0.0266405825391332,\n \"acc_norm\": 0.8596491228070176,\n \"acc_norm_stderr\": 0.0266405825391332\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.01715160555574914,\n \"mc2\": 0.5672072620719157,\n \"mc2_stderr\": 0.014748654287331589\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8476716653512234,\n \"acc_stderr\": 0.010099208246065614\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5382865807429871,\n \"acc_stderr\": 0.01373204822701668\n }\n}\n```", "repo_url": "https://huggingface.co/ibivibiv/strix-rufipes-70b", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|arc:challenge|25_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|gsm8k|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hellaswag|10_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T03-56-38.126146.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T03-56-38.126146.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T03-56-38.126146.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T03-56-38.126146.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T03-56-38.126146.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T03-56-38.126146.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["**/details_harness|winogrande|5_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T03-56-38.126146.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T03_56_38.126146", "path": ["results_2024-01-23T03-56-38.126146.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T03-56-38.126146.parquet"]}]}]}
2024-01-23T03:59:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ibivibiv/strix-rufipes-70b Dataset automatically created during the evaluation run of model ibivibiv/strix-rufipes-70b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T03:56:38.126146 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ibivibiv/strix-rufipes-70b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/strix-rufipes-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T03:56:38.126146 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ibivibiv/strix-rufipes-70b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/strix-rufipes-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T03:56:38.126146 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3821e224dcaf71dfe44e0852d5dfc2eb776429c5
# Dataset Card for Evaluation run of BarryFutureman/WildWest-Variant3-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [BarryFutureman/WildWest-Variant3-7B](https://huggingface.co/BarryFutureman/WildWest-Variant3-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T04:01:23.522881](https://huggingface.co/datasets/open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B/blob/main/results_2024-01-23T04-01-23.522881.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6536937523121658, "acc_stderr": 0.031970346398364005, "acc_norm": 0.6530672708233058, "acc_norm_stderr": 0.03263778918360402, "mc1": 0.5410036719706243, "mc1_stderr": 0.017444544447661206, "mc2": 0.6808667640260779, "mc2_stderr": 0.015144269926582927 }, "harness|arc:challenge|25": { "acc": 0.7013651877133106, "acc_stderr": 0.013374078615068744, "acc_norm": 0.7320819112627986, "acc_norm_stderr": 0.012942030195136444 }, "harness|hellaswag|10": { "acc": 0.7144991037641903, "acc_stderr": 0.00450729619622781, "acc_norm": 0.8836885082652858, "acc_norm_stderr": 0.003199428675985865 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996792, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996792 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.032232762667117124, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.032232762667117124 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083522, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083522 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229872, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229872 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328973, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328973 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857416, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092437, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092437 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156861, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156861 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601443, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601443 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7040358744394619, "acc_stderr": 0.030636591348699803, "acc_norm": 0.7040358744394619, "acc_norm_stderr": 0.030636591348699803 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.03446513350752599, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.03446513350752599 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092368, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993462, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993462 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7514450867052023, "acc_stderr": 0.023267528432100174, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4301675977653631, "acc_stderr": 0.01655860163604103, "acc_norm": 0.4301675977653631, "acc_norm_stderr": 0.01655860163604103 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.025917806117147158, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.025917806117147158 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4595827900912647, "acc_stderr": 0.012728446067669971, "acc_norm": 0.4595827900912647, "acc_norm_stderr": 0.012728446067669971 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6650326797385621, "acc_stderr": 0.01909422816700033, "acc_norm": 0.6650326797385621, "acc_norm_stderr": 0.01909422816700033 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.0282638899437846, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.0282638899437846 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5410036719706243, "mc1_stderr": 0.017444544447661206, "mc2": 0.6808667640260779, "mc2_stderr": 0.015144269926582927 }, "harness|winogrande|5": { "acc": 0.8437253354380426, "acc_stderr": 0.0102053517918735 }, "harness|gsm8k|5": { "acc": 0.7005307050796058, "acc_stderr": 0.012616300735519649 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
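The aggregated metrics shown under "Latest results" above are also stored in the "results" configuration of this dataset. A minimal sketch of reloading them with the `datasets` library (assuming the repository name used throughout this card, and the "latest" split, which tracks the most recent run):

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run.
# The "results" configuration holds the summary table; the "latest" split
# always points to the newest run of this model.
results = load_dataset(
    "open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B",
    "results",
    split="latest",
)
print(results[0])  # aggregated accuracy / stderr values for the latest run
```

This mirrors the earlier "harness_winogrande_5" example; any other per-task configuration of this dataset can be loaded the same way by swapping the configuration name.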
open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B
[ "region:us" ]
2024-01-23T04:03:40+00:00
{"pretty_name": "Evaluation run of BarryFutureman/WildWest-Variant3-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [BarryFutureman/WildWest-Variant3-7B](https://huggingface.co/BarryFutureman/WildWest-Variant3-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T04:01:23.522881](https://huggingface.co/datasets/open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B/blob/main/results_2024-01-23T04-01-23.522881.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6536937523121658,\n \"acc_stderr\": 0.031970346398364005,\n \"acc_norm\": 0.6530672708233058,\n \"acc_norm_stderr\": 0.03263778918360402,\n \"mc1\": 0.5410036719706243,\n \"mc1_stderr\": 0.017444544447661206,\n \"mc2\": 0.6808667640260779,\n \"mc2_stderr\": 0.015144269926582927\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7013651877133106,\n \"acc_stderr\": 0.013374078615068744,\n \"acc_norm\": 0.7320819112627986,\n \"acc_norm_stderr\": 0.012942030195136444\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7144991037641903,\n \"acc_stderr\": 0.00450729619622781,\n \"acc_norm\": 0.8836885082652858,\n \"acc_norm_stderr\": 0.003199428675985865\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996792,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996792\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.032232762667117124,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.032232762667117124\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083522,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083522\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229872,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229872\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328973,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328973\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857416,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092437,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092437\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156861,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156861\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601443,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601443\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.030636591348699803,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.030636591348699803\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752599,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752599\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092368,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8237547892720306,\n \"acc_stderr\": 0.013625556907993462,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993462\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4301675977653631,\n \"acc_stderr\": 0.01655860163604103,\n \"acc_norm\": 0.4301675977653631,\n \"acc_norm_stderr\": 0.01655860163604103\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.025917806117147158,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.025917806117147158\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4595827900912647,\n \"acc_stderr\": 0.012728446067669971,\n \"acc_norm\": 0.4595827900912647,\n \"acc_norm_stderr\": 0.012728446067669971\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.01909422816700033,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.01909422816700033\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.0282638899437846,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.0282638899437846\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5410036719706243,\n \"mc1_stderr\": 0.017444544447661206,\n \"mc2\": 0.6808667640260779,\n \"mc2_stderr\": 0.015144269926582927\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8437253354380426,\n \"acc_stderr\": 0.0102053517918735\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7005307050796058,\n \"acc_stderr\": 0.012616300735519649\n }\n}\n```", "repo_url": 
"https://huggingface.co/BarryFutureman/WildWest-Variant3-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-01-23.522881.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-01-23.522881.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-01-23.522881.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T04-01-23.522881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-01-23.522881.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T04_01_23.522881", "path": ["**/details_harness|winogrande|5_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T04-01-23.522881.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T04_01_23.522881", "path": ["results_2024-01-23T04-01-23.522881.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T04-01-23.522881.parquet"]}]}]}
2024-01-23T04:04:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of BarryFutureman/WildWest-Variant3-7B Dataset automatically created during the evaluation run of model BarryFutureman/WildWest-Variant3-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T04:01:23.522881 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
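The load snippet referenced above ("To load the details from a run, you can for instance do the following:") was stripped from this plain-text rendering of the card; the call below reproduces the snippet given in the card's metadata, with no names changed:

```python
from datasets import load_dataset

# Load the details of one evaluated task ("harness_winogrande_5") from the
# latest run of this evaluation; the other task configs follow the same scheme.
data = load_dataset(
    "open-llm-leaderboard/details_BarryFutureman__WildWest-Variant3-7B",
    "harness_winogrande_5",
    split="train",
)
```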
[ "# Dataset Card for Evaluation run of BarryFutureman/WildWest-Variant3-7B\n\n\n\nDataset automatically created during the evaluation run of model BarryFutureman/WildWest-Variant3-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:01:23.522881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of BarryFutureman/WildWest-Variant3-7B\n\n\n\nDataset automatically created during the evaluation run of model BarryFutureman/WildWest-Variant3-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T04:01:23.522881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
23d00e3a535a74bd4e13f2b18940e189076c5787
# Dataset Card This dataset is a UMAP 2D-projection of the glove.6B.50d embeddings from Stanford. It is intended as a fast reference for visualizing embeddings in a workshop from the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute. ## Dataset Details ### Dataset Description The embeddings have a vocabulary of 400k tokens with 2 dimensions per token. **Curated by:** [Mario Tormo Romero](https://huggingface.co/mt0rm0) **License:** cc0-1.0 ### Dataset Sources This dataset has been created with UMAP from the [glove.6B.50d embeddings](https://nlp.stanford.edu/projects/glove/). ## Uses This is a dataset created for pedagogical purposes, and is used in the **Working with embeddings** Workshop created and organized by the [AI Service Center Berlin-Brandenburg](https://hpi.de/kisz/) at the [Hasso Plattner Institute](https://hpi.de/). ## Dataset Creation ### Curation Rationale With this dataset we want to provide a fast way of obtaining the required data for our workshops, without having to process the data for long periods during the workshop.
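As a usage illustration for the workshop setting described above, here is a minimal sketch of loading the projection and plotting it. The repository id comes from this card; the split name and the column names ("x", "y") are assumptions, since the card does not document the dataset's schema, and matplotlib is used only for the illustration:

```python
from datasets import load_dataset
import matplotlib.pyplot as plt

# Assumed schema: one row per token with its 2D UMAP coordinates in
# columns "x" and "y"; split and column names are guesses, not documented.
ds = load_dataset("mt0rm0/glove.6B.50d.umap.2d", split="train")

subset = ds.select(range(5000))  # plot only a subset of the ~400k tokens for speed
plt.scatter(subset["x"], subset["y"], s=1)
plt.title("UMAP 2D projection of glove.6B.50d")
plt.show()
```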
mt0rm0/glove.6B.50d.umap.2d
[ "task_categories:sentence-similarity", "size_categories:100K<n<1M", "language:en", "license:cc0-1.0", "GloVe", "embeddings", "semantic search", "hpi", "workshop", "region:us" ]
2024-01-23T04:26:40+00:00
{"language": ["en"], "license": "cc0-1.0", "size_categories": ["100K<n<1M"], "task_categories": ["sentence-similarity"], "pretty_name": "\"UMAP 2D-Projection of glove.6B.50d embeddings\"", "tags": ["GloVe", "embeddings", "semantic search", "hpi", "workshop"]}
2024-01-23T05:00:09+00:00
[]
[ "en" ]
TAGS #task_categories-sentence-similarity #size_categories-100K<n<1M #language-English #license-cc0-1.0 #GloVe #embeddings #semantic search #hpi #workshop #region-us
# Dataset Card This dataset is a UMAP 2D-projection of the glove.6B.50d embeddings from Stanford. It is intended as a fast reference for visualizing embeddings in a workshop from the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute. ## Dataset Details ### Dataset Description The embeddings have a vocabulary of 400k tokens with 2 dimensions per token. Curated by: Mario Tormo Romero License: cc0-1.0 ### Dataset Sources This dataset has been created with UMAP from the glove.6B.50d embeddings. ## Uses This is a dataset created for pedagogical purposes, and is used in the Working with embeddings Workshop created and organized by the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute. ## Dataset Creation ### Curation Rationale With this dataset we want to provide a fast way of obtaining the required data for our workshops, without having to process the data for long periods during the workshop.
[ "# Dataset Card\n\nThis dataset is a UMAP 2D-projection of the glove.6B.50d embeddings from Stanford. It is intended as a fast reference for visualizing embeddings in a workshop from the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute.", "## Dataset Details", "### Dataset Description\n\nThe embeddings have a vocabulary of 400k tokens with 2 dimensions each token.\n\nCurated by: Mario Tormo Romero\n \nLicense: cc0-1.0", "### Dataset Sources\nThis Dataset has been created with UMAP from the glove.6B.50d embeddings.", "## Uses\n\nThis is a dataset created for pegagogical purposes, and is used in the Working with embeddings Workshop created and organized by the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute.", "## Dataset Creation", "### Curation Rationale\n\nWe want to provide with this dataset a fast way of obtaining the required data for our workshops, without having to process the data for long periods during the workshop." ]
[ "TAGS\n#task_categories-sentence-similarity #size_categories-100K<n<1M #language-English #license-cc0-1.0 #GloVe #embeddings #semantic search #hpi #workshop #region-us \n", "# Dataset Card\n\nThis dataset is a UMAP 2D-projection of the glove.6B.50d embeddings from Stanford. It is intended as a fast reference for visualizing embeddings in a workshop from the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute.", "## Dataset Details", "### Dataset Description\n\nThe embeddings have a vocabulary of 400k tokens with 2 dimensions each token.\n\nCurated by: Mario Tormo Romero\n \nLicense: cc0-1.0", "### Dataset Sources\nThis Dataset has been created with UMAP from the glove.6B.50d embeddings.", "## Uses\n\nThis is a dataset created for pegagogical purposes, and is used in the Working with embeddings Workshop created and organized by the AI Service Center Berlin-Brandenburg at the Hasso Plattner Institute.", "## Dataset Creation", "### Curation Rationale\n\nWe want to provide with this dataset a fast way of obtaining the required data for our workshops, without having to process the data for long periods during the workshop." ]
0573e8559b636225357cfdfe00f09913ef51ffb0
# Dataset Card for Evaluation run of binbi/MoMo-70B-V1.2_1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [binbi/MoMo-70B-V1.2_1](https://huggingface.co/binbi/MoMo-70B-V1.2_1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T05:09:38.161416](https://huggingface.co/datasets/open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1/blob/main/results_2024-01-23T05-09-38.161416.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.698588284419882, "acc_stderr": 0.0303909434575038, "acc_norm": 0.7024430426815292, "acc_norm_stderr": 0.03098225812650051, "mc1": 0.4357405140758874, "mc1_stderr": 0.017358345398863124, "mc2": 0.6130536296369835, "mc2_stderr": 0.01483068540330801 }, "harness|arc:challenge|25": { "acc": 0.6715017064846417, "acc_stderr": 0.013724978465537302, "acc_norm": 0.7090443686006825, "acc_norm_stderr": 0.01327307786590759 }, "harness|hellaswag|10": { "acc": 0.6775542720573591, "acc_stderr": 0.00466457278498559, "acc_norm": 0.8646683927504482, "acc_norm_stderr": 0.0034137831331580715 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595852, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595852 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8092105263157895, "acc_stderr": 0.031975658210325, "acc_norm": 0.8092105263157895, "acc_norm_stderr": 0.031975658210325 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544064, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544064 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948617, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948617 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082635, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082635 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.676595744680851, "acc_stderr": 0.030579442773610337, "acc_norm": 0.676595744680851, "acc_norm_stderr": 0.030579442773610337 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594963, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594963 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859372, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859372 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.49206349206349204, "acc_stderr": 0.044715725362943486, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8290322580645161, "acc_stderr": 0.02141724293632159, "acc_norm": 0.8290322580645161, "acc_norm_stderr": 0.02141724293632159 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.541871921182266, "acc_stderr": 0.03505630140785741, "acc_norm": 0.541871921182266, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.029311188674983137, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.029311188674983137 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8838383838383839, "acc_stderr": 0.022828881775249377, "acc_norm": 0.8838383838383839, "acc_norm_stderr": 0.022828881775249377 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.927461139896373, "acc_stderr": 0.018718998520678178, "acc_norm": 0.927461139896373, "acc_norm_stderr": 0.018718998520678178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6897435897435897, "acc_stderr": 0.023454674889404288, "acc_norm": 0.6897435897435897, "acc_norm_stderr": 0.023454674889404288 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.773109243697479, "acc_stderr": 0.027205371538279472, "acc_norm": 0.773109243697479, "acc_norm_stderr": 0.027205371538279472 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.47019867549668876, "acc_stderr": 
0.040752249922169775, "acc_norm": 0.47019867549668876, "acc_norm_stderr": 0.040752249922169775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9064220183486239, "acc_stderr": 0.012486841824601963, "acc_norm": 0.9064220183486239, "acc_norm_stderr": 0.012486841824601963 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.03324708911809117, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.03324708911809117 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9068627450980392, "acc_stderr": 0.020397853969427, "acc_norm": 0.9068627450980392, "acc_norm_stderr": 0.020397853969427 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.01999556072375854, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.01999556072375854 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7892376681614349, "acc_stderr": 0.027373095500540193, "acc_norm": 0.7892376681614349, "acc_norm_stderr": 0.027373095500540193 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8473282442748091, "acc_stderr": 0.031545216720054725, "acc_norm": 0.8473282442748091, "acc_norm_stderr": 0.031545216720054725 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035196, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035196 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971726, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971726 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5357142857142857, "acc_stderr": 0.04733667890053756, "acc_norm": 0.5357142857142857, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092368, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8710089399744572, "acc_stderr": 0.01198637154808687, "acc_norm": 0.8710089399744572, "acc_norm_stderr": 0.01198637154808687 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6245810055865921, "acc_stderr": 0.01619510424846353, "acc_norm": 0.6245810055865921, "acc_norm_stderr": 0.01619510424846353 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7620578778135049, "acc_stderr": 0.02418515064781871, "acc_norm": 0.7620578778135049, "acc_norm_stderr": 0.02418515064781871 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8364197530864198, "acc_stderr": 0.020581466138257117, "acc_norm": 0.8364197530864198, "acc_norm_stderr": 0.020581466138257117 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5390070921985816, "acc_stderr": 0.02973659252642444, "acc_norm": 0.5390070921985816, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5573663624511083, "acc_stderr": 0.012685906538206237, "acc_norm": 0.5573663624511083, "acc_norm_stderr": 0.012685906538206237 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7426470588235294, "acc_stderr": 0.0265565194700415, "acc_norm": 0.7426470588235294, "acc_norm_stderr": 0.0265565194700415 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7663398692810458, "acc_stderr": 0.017119158496044506, "acc_norm": 0.7663398692810458, "acc_norm_stderr": 0.017119158496044506 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900798, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900798 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.03882310850890594, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8713450292397661, "acc_stderr": 0.02567934272327692, "acc_norm": 0.8713450292397661, "acc_norm_stderr": 0.02567934272327692 }, "harness|truthfulqa:mc|0": { "mc1": 0.4357405140758874, "mc1_stderr": 0.017358345398863124, "mc2": 0.6130536296369835, "mc2_stderr": 0.01483068540330801 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.010529981411838895 }, "harness|gsm8k|5": { "acc": 0.5633055344958302, "acc_stderr": 0.013661649780905488 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
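As a complement to the single-configuration example earlier in the card, here is a minimal sketch of pulling both the aggregated scores and one per-task detail config; it assumes the `datasets` library is installed, and the config names ("results", "harness_gsm8k_5") and the "latest" split are taken from this card's configuration list, not verified against the live repository.

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run
# ("latest" mirrors the newest timestamped split).
results = load_dataset(
    "open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1",
    "results",
    split="latest",
)

# Per-sample details for a single task,
# e.g. the 5-shot GSM8K harness config.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1",
    "harness_gsm8k_5",
    split="latest",
)

print(results[0])          # one row of aggregated results
print(len(gsm8k_details))  # number of evaluated GSM8K examples
```

The same pattern applies to any of the other harness configs listed in the metadata; only the config name changes.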
open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1
[ "region:us" ]
2024-01-23T05:11:59+00:00
{"pretty_name": "Evaluation run of binbi/MoMo-70B-V1.2_1", "dataset_summary": "Dataset automatically created during the evaluation run of model [binbi/MoMo-70B-V1.2_1](https://huggingface.co/binbi/MoMo-70B-V1.2_1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T05:09:38.161416](https://huggingface.co/datasets/open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1/blob/main/results_2024-01-23T05-09-38.161416.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.698588284419882,\n \"acc_stderr\": 0.0303909434575038,\n \"acc_norm\": 0.7024430426815292,\n \"acc_norm_stderr\": 0.03098225812650051,\n \"mc1\": 0.4357405140758874,\n \"mc1_stderr\": 0.017358345398863124,\n \"mc2\": 0.6130536296369835,\n \"mc2_stderr\": 0.01483068540330801\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6715017064846417,\n \"acc_stderr\": 0.013724978465537302,\n \"acc_norm\": 0.7090443686006825,\n \"acc_norm_stderr\": 0.01327307786590759\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6775542720573591,\n \"acc_stderr\": 0.00466457278498559,\n \"acc_norm\": 0.8646683927504482,\n \"acc_norm_stderr\": 0.0034137831331580715\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8092105263157895,\n \"acc_stderr\": 0.031975658210325,\n \"acc_norm\": 0.8092105263157895,\n \"acc_norm_stderr\": 0.031975658210325\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544064,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544064\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.031164899666948617,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.031164899666948617\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 
0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082635,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082635\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.676595744680851,\n \"acc_stderr\": 0.030579442773610337,\n \"acc_norm\": 0.676595744680851,\n \"acc_norm_stderr\": 0.030579442773610337\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859372,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859372\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8290322580645161,\n \"acc_stderr\": 0.02141724293632159,\n \"acc_norm\": 0.8290322580645161,\n \"acc_norm_stderr\": 0.02141724293632159\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.541871921182266,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.541871921182266,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.029311188674983137,\n \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.029311188674983137\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8838383838383839,\n \"acc_stderr\": 0.022828881775249377,\n \"acc_norm\": 0.8838383838383839,\n \"acc_norm_stderr\": 0.022828881775249377\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.927461139896373,\n \"acc_stderr\": 0.018718998520678178,\n \"acc_norm\": 0.927461139896373,\n \"acc_norm_stderr\": 0.018718998520678178\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6897435897435897,\n \"acc_stderr\": 0.023454674889404288,\n \"acc_norm\": 0.6897435897435897,\n \"acc_norm_stderr\": 0.023454674889404288\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.773109243697479,\n \"acc_stderr\": 0.027205371538279472,\n \"acc_norm\": 0.773109243697479,\n \"acc_norm_stderr\": 0.027205371538279472\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.47019867549668876,\n \"acc_stderr\": 0.040752249922169775,\n \"acc_norm\": 0.47019867549668876,\n \"acc_norm_stderr\": 0.040752249922169775\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9064220183486239,\n \"acc_stderr\": 0.012486841824601963,\n \"acc_norm\": 0.9064220183486239,\n \"acc_norm_stderr\": 0.012486841824601963\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.03324708911809117,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.03324708911809117\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9068627450980392,\n \"acc_stderr\": 0.020397853969427,\n \"acc_norm\": 0.9068627450980392,\n \"acc_norm_stderr\": 0.020397853969427\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8945147679324894,\n \"acc_stderr\": 0.01999556072375854,\n \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.01999556072375854\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7892376681614349,\n \"acc_stderr\": 0.027373095500540193,\n \"acc_norm\": 0.7892376681614349,\n \"acc_norm_stderr\": 0.027373095500540193\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8473282442748091,\n \"acc_stderr\": 0.031545216720054725,\n \"acc_norm\": 0.8473282442748091,\n \"acc_norm_stderr\": 0.031545216720054725\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035196,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035196\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971726,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971726\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5357142857142857,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.5357142857142857,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092368,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8710089399744572,\n \"acc_stderr\": 0.01198637154808687,\n \"acc_norm\": 
0.8710089399744572,\n \"acc_norm_stderr\": 0.01198637154808687\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7774566473988439,\n \"acc_stderr\": 0.02239421566194282,\n \"acc_norm\": 0.7774566473988439,\n \"acc_norm_stderr\": 0.02239421566194282\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6245810055865921,\n \"acc_stderr\": 0.01619510424846353,\n \"acc_norm\": 0.6245810055865921,\n \"acc_norm_stderr\": 0.01619510424846353\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7620578778135049,\n \"acc_stderr\": 0.02418515064781871,\n \"acc_norm\": 0.7620578778135049,\n \"acc_norm_stderr\": 0.02418515064781871\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8364197530864198,\n \"acc_stderr\": 0.020581466138257117,\n \"acc_norm\": 0.8364197530864198,\n \"acc_norm_stderr\": 0.020581466138257117\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5390070921985816,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.5390070921985816,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5573663624511083,\n \"acc_stderr\": 0.012685906538206237,\n \"acc_norm\": 0.5573663624511083,\n \"acc_norm_stderr\": 0.012685906538206237\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7426470588235294,\n \"acc_stderr\": 0.0265565194700415,\n \"acc_norm\": 0.7426470588235294,\n \"acc_norm_stderr\": 0.0265565194700415\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7663398692810458,\n \"acc_stderr\": 0.017119158496044506,\n \"acc_norm\": 0.7663398692810458,\n \"acc_norm_stderr\": 0.017119158496044506\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.024112678240900798,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.024112678240900798\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.02567934272327692,\n \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.02567934272327692\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4357405140758874,\n \"mc1_stderr\": 0.017358345398863124,\n \"mc2\": 0.6130536296369835,\n \"mc2_stderr\": 0.01483068540330801\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.010529981411838895\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5633055344958302,\n \"acc_stderr\": 0.013661649780905488\n }\n}\n```", "repo_url": "https://huggingface.co/binbi/MoMo-70B-V1.2_1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|arc:challenge|25_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|gsm8k|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hellaswag|10_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-09-38.161416.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-09-38.161416.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-09-38.161416.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T05-09-38.161416.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-09-38.161416.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-09-38.161416.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["**/details_harness|winogrande|5_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T05-09-38.161416.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T05_09_38.161416", "path": ["results_2024-01-23T05-09-38.161416.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T05-09-38.161416.parquet"]}]}]}
2024-01-23T05:12:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of binbi/MoMo-70B-V1.2_1 Dataset automatically created during the evaluation run of model binbi/MoMo-70B-V1.2_1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (the loading snippet is sketched just after this card text): ## Latest results These are the latest results from run 2024-01-23T05:09:38.161416 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
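The "To load the details from a run" sentence in the card above originally preceded a short Python snippet that was dropped when the card was flattened into this record's text field. A minimal sketch of that snippet follows; the repository id is an assumption based on the leaderboard's usual naming convention open-llm-leaderboard/details_<org>__<model>, since the URL itself is not reproduced in this record:

```python
from datasets import load_dataset

# Assumed repository id, following the open-llm-leaderboard convention
# details_<org>__<model>; adjust if the actual repository differs.
data = load_dataset(
    "open-llm-leaderboard/details_binbi__MoMo-70B-V1.2_1",
    "harness_winogrande_5",  # one of the 63 per-task configurations listed in the metadata
    split="train",           # "train" always points at the latest results
)
```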
[ "# Dataset Card for Evaluation run of binbi/MoMo-70B-V1.2_1\n\n\n\nDataset automatically created during the evaluation run of model binbi/MoMo-70B-V1.2_1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T05:09:38.161416(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of binbi/MoMo-70B-V1.2_1\n\n\n\nDataset automatically created during the evaluation run of model binbi/MoMo-70B-V1.2_1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T05:09:38.161416(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
af839bd2429ba3d4b59447b97490eaac95619ff9
# Dataset Card for Evaluation run of jsfs11/West-Dare-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jsfs11/West-Dare-7B](https://huggingface.co/jsfs11/West-Dare-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jsfs11__West-Dare-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T05:18:14.864719](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__West-Dare-7B/blob/main/results_2024-01-23T05-18-14.864719.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.648567891573413, "acc_stderr": 0.03210353895871729, "acc_norm": 0.6480416995389463, "acc_norm_stderr": 0.03277410806832145, "mc1": 0.5018359853121175, "mc1_stderr": 0.017503383046877055, "mc2": 0.6625371118483276, "mc2_stderr": 0.01544150612030805 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.013582571095815291, "acc_norm": 0.7141638225255973, "acc_norm_stderr": 0.013203196088537376 }, "harness|hellaswag|10": { "acc": 0.7099183429595698, "acc_stderr": 0.004528723951878242, "acc_norm": 0.8757219677355108, "acc_norm_stderr": 0.0032922425436373304 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.028450154794118637, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.028450154794118637 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3,
"acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43915343915343913, "acc_stderr": 0.025559920550531003, "acc_norm": 0.43915343915343913, "acc_norm_stderr": 0.025559920550531003 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.02425107126220884, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511656986, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.033744026441394036, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6538461538461539, "acc_stderr": 0.024121125416941197, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.024121125416941197 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616248, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616248 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.0302839955258844, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.0302839955258844 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 
0.038227469376587525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455334, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455334 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8143459915611815, "acc_stderr": 0.025310495376944856, "acc_norm": 0.8143459915611815, "acc_norm_stderr": 0.025310495376944856 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128137, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128137 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323797, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323797 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4044692737430168, "acc_stderr": 0.01641444091729315, "acc_norm": 0.4044692737430168, "acc_norm_stderr": 0.01641444091729315 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, 
"acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4680573663624511, "acc_stderr": 0.012744149704869649, "acc_norm": 0.4680573663624511, "acc_norm_stderr": 0.012744149704869649 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6617647058823529, "acc_stderr": 0.01913994374848704, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.01913994374848704 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291293, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291293 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.02619392354445412, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.02619392354445412 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5018359853121175, "mc1_stderr": 0.017503383046877055, "mc2": 0.6625371118483276, "mc2_stderr": 0.01544150612030805 }, "harness|winogrande|5": { "acc": 0.8453038674033149, "acc_stderr": 0.010163172650433535 }, "harness|gsm8k|5": { "acc": 0.6785443517816527, "acc_stderr": 0.012864471384836705 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
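As a usage note for the card above: besides per-task configurations such as harness_winogrande_5, the aggregated scores live in the "results" configuration, and every configuration also exposes a "latest" split alongside the timestamped one. A small sketch, using config and split names taken from this record's card and metadata:

```python
from datasets import load_dataset

# Aggregated metrics for the whole evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_jsfs11__West-Dare-7B",
    "results",
    split="latest",
)

# Per-task details, e.g. the 5-shot MMLU "management" subtask.
management = load_dataset(
    "open-llm-leaderboard/details_jsfs11__West-Dare-7B",
    "harness_hendrycksTest_management_5",
    split="latest",
)
```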
open-llm-leaderboard/details_jsfs11__West-Dare-7B
[ "region:us" ]
2024-01-23T05:20:36+00:00
{"pretty_name": "Evaluation run of jsfs11/West-Dare-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jsfs11/West-Dare-7B](https://huggingface.co/jsfs11/West-Dare-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jsfs11__West-Dare-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T05:18:14.864719](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__West-Dare-7B/blob/main/results_2024-01-23T05-18-14.864719.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.648567891573413,\n \"acc_stderr\": 0.03210353895871729,\n \"acc_norm\": 0.6480416995389463,\n \"acc_norm_stderr\": 0.03277410806832145,\n \"mc1\": 0.5018359853121175,\n \"mc1_stderr\": 0.017503383046877055,\n \"mc2\": 0.6625371118483276,\n \"mc2_stderr\": 0.01544150612030805\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.013582571095815291,\n \"acc_norm\": 0.7141638225255973,\n \"acc_norm_stderr\": 0.013203196088537376\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7099183429595698,\n \"acc_stderr\": 0.004528723951878242,\n \"acc_norm\": 0.8757219677355108,\n \"acc_norm_stderr\": 0.0032922425436373304\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43915343915343913,\n \"acc_stderr\": 0.025559920550531003,\n \"acc_norm\": 0.43915343915343913,\n \"acc_norm_stderr\": 0.025559920550531003\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6538461538461539,\n \"acc_stderr\": 
0.024121125416941197,\n \"acc_norm\": 0.6538461538461539,\n \"acc_norm_stderr\": 0.024121125416941197\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616248,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616248\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.0302839955258844,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.0302839955258844\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455334,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455334\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944856,\n \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944856\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128137,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323797,\n \"acc_norm\": 0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323797\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4044692737430168,\n \"acc_stderr\": 0.01641444091729315,\n \"acc_norm\": 0.4044692737430168,\n \"acc_norm_stderr\": 0.01641444091729315\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4680573663624511,\n \"acc_stderr\": 0.012744149704869649,\n \"acc_norm\": 0.4680573663624511,\n \"acc_norm_stderr\": 0.012744149704869649\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.01913994374848704,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.01913994374848704\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291293,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291293\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.02619392354445412,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.02619392354445412\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5018359853121175,\n \"mc1_stderr\": 0.017503383046877055,\n \"mc2\": 0.6625371118483276,\n \"mc2_stderr\": 0.01544150612030805\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8453038674033149,\n \"acc_stderr\": 0.010163172650433535\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6785443517816527,\n \"acc_stderr\": 0.012864471384836705\n }\n}\n```", "repo_url": "https://huggingface.co/jsfs11/West-Dare-7B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|arc:challenge|25_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|gsm8k|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hellaswag|10_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-18-14.864719.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-18-14.864719.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-18-14.864719.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T05-18-14.864719.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-18-14.864719.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-18-14.864719.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["**/details_harness|winogrande|5_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T05-18-14.864719.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T05_18_14.864719", "path": ["results_2024-01-23T05-18-14.864719.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T05-18-14.864719.parquet"]}]}]}
2024-01-23T05:20:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jsfs11/West-Dare-7B Dataset automatically created during the evaluation run of model jsfs11/West-Dare-7B on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T05:18:14.864719(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of jsfs11/West-Dare-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/West-Dare-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T05:18:14.864719(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jsfs11/West-Dare-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/West-Dare-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T05:18:14.864719(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
7ec87b0c5fec6395aa441d6e5134fe5155d6737c
This data was generated with an AI model from quality texts. # Version * 0.1: add Korean history. --- This software/dataset is provided "as is" without express or implied warranty, to the extent permitted by applicable law. If you use this dataset, please mention it in your model card. ---
ziozzang/Korean_QA_gen_datasets
[ "region:us" ]
2024-01-23T05:25:38+00:00
{}
2024-01-23T05:30:07+00:00
[]
[]
TAGS #region-us
This data was generated with an AI model from quality texts. # Version * 0.1: add Korean history. --- This software/dataset is provided "as is" without express or implied warranty, to the extent permitted by applicable law. If you use this dataset, please mention it in your model card. ---
[ "# Version\n* 0.1: add korean history.\n\n---\nThis software/dataset is provided \"as is\" without express or implied warranty to the extent permitted by applicable law\n\nIf you used this dataset, specify in your model at the model-card.\n---" ]
[ "TAGS\n#region-us \n", "# Version\n* 0.1: add korean history.\n\n---\nThis software/dataset is provided \"as is\" without express or implied warranty to the extent permitted by applicable law\n\nIf you used this dataset, specify in your model at the model-card.\n---" ]
fd616c50db2bd3ca88c596662beadd01eccc1fe0
Sourced from the USDA website, openly published by working groups which collected this data in the course of their work as they identified plants growing within each state, in the field. From the USDA website: "NRCS personnel and cooperators can download an NRCS State Plants List which includes Symbol, Synonym Symbol, Scientific Name with Authors, preferred State Common Name, and Family. Fields in the State Plants text file are delimited by commas and enclosed in double quotes. You can import this file into many databases or spreadsheets. For example, first save the .txt file, then open in Microsoft Excel by specifying "Text Files" in the file type scroll box, and import by specifying "Comma" as the delimiter. Or use the file directly in Excel: copy it from the screen, paste it into a new worksheet, and select "Text to Columns..." from the Data menu. To begin the download, please click your preferred state on the map or list of states below. For questions about the State Plants List for your area please contact your State Plants Coordinator"
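As a sketch of the same import outside Excel, the comma-delimited, double-quoted state list can also be read with pandas. The filename below is a placeholder for whichever state file you download:

```python
import pandas as pd

# Placeholder filename: use the state list actually downloaded from the USDA site.
plants = pd.read_csv(
    "state_plants_list.txt",
    sep=",",          # fields are comma-delimited
    quotechar='"',    # values are enclosed in double quotes
)

# Columns described above: Symbol, Synonym Symbol, Scientific Name with Authors,
# preferred State Common Name, and Family.
print(plants.columns.tolist())
print(plants.head())
```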
Solshine/USDA_Plants_Database_By_State_WorkingGroupList
[ "language:en", "biology", "climate", "plants", "region:us" ]
2024-01-23T05:33:46+00:00
{"language": ["en"], "pretty_name": "USDA US Plants Database by State", "tags": ["biology", "climate", "plants"]}
2024-01-27T21:35:12+00:00
[]
[ "en" ]
TAGS #language-English #biology #climate #plants #region-us
Sourced from the USDA website, openly published by working groups which collected this data in the course of their work as they identified plants growing within each state, in the field. From the USDA website: "NRCS personnel and cooperators can download an NRCS State Plants List which includes Symbol, Synonym Symbol, Scientific Name with Authors, preferred State Common Name, and Family. Fields in the State Plants text file are delimited by commas and enclosed in double quotes. You can import this file into many databases or spreadsheets. For example, first save the .txt file, then open in Microsoft Excel by specifying "Text Files" in the file type scroll box, and import by specifying "Comma" as the delimiter. Or use the file directly in Excel: copy it from the screen, paste it into a new worksheet, and select "Text to Columns..." from the Data menu. To begin the download, please click your preferred state on the map or list of states below. For questions about the State Plants List for your area please contact your State Plants Coordinator"
[]
[ "TAGS\n#language-English #biology #climate #plants #region-us \n" ]
e881745c3f9e4a072c24dba08780ed816924b45e
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> This dataset provides algorithms and corresponding Python source code which can be leveraged for any type of code conversion applications. - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. 
--> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
annawleo/python-algorithm-sourcecode
[ "language:en", "license:apache-2.0", "python", "sourcecode", "algorithm", "pseudocode", "python3", "region:us" ]
2024-01-23T06:08:54+00:00
{"language": ["en"], "license": "apache-2.0", "tags": ["python", "sourcecode", "algorithm", "pseudocode", "python3"]}
2024-01-24T07:41:54+00:00
[]
[ "en" ]
TAGS #language-English #license-apache-2.0 #python #sourcecode #algorithm #pseudocode #python3 #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description This dataset provides algorithms and corresponding Python source code which can be leveraged for any type of code conversion applications. - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\nThis dataset provides algorithms and corresponding Python source code which can be leveraged for any type of code conversion applications. \n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#language-English #license-apache-2.0 #python #sourcecode #algorithm #pseudocode #python3 #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\nThis dataset provides algorithms and corresponding Python source code which can be leveraged for any type of code conversion applications. \n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
09907528a7aa45e20077a2d51004468903c0f968
# GNHK Synthetic OCR Dataset ## Overview Welcome to the GNHK Synthetic OCR Dataset repository. Here I have generated synthetic data using [GNHK Dataset](https://github.com/GoodNotes/GNHK-dataset), and Open Source LLMs like Mixtral. The dataset contains queries on the images and their answers. ## What's Inside? - **Dataset Folder:** The Dataset Folder contains the images, and corresponding to each image, there is a JSON file which carries the ocr information of that image - **Parquet File:** For easy handling and analysis, the processed dataset is saved as a Parquet file (`dataset.parquet`). This file contains images, their OCR text, one probable question per image, and its likely answer. # Methodology for Generation ## ParseJSON.ipynb This Python notebook interacts with a dataset provided by GNHK, stored on Google Drive. The dataset consists of images, each accompanied by a JSON file containing OCR information for that image. The purpose of ParseJSON is to extract information from these JSON files, convert it into text files, and store these files in a folder named `parsed_dataset` on the same Google Drive. ### What does it parse to? - **ocr_data**: It extracts OCR texts for words based on their 'line_index' and organizes them to represent the OCR text of the given image. - **bbox_data**: Another text file is generated by the parser, structuring information in this format: `word: [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]` (where x0, y1, etc. are coordinates of bounding boxes) ### Why do we need a parser? The parser is necessary because models require OCR data and bounding boxes as input. If this information is in JSON format, creating a prompt for the models becomes complex and may lead to confusion, resulting in undesirable outputs. The parser simplifies the process by converting the data into easily understandable text files. ## 2. DatasetGeneration.ipynb This notebook is the central tool for creating the dataset. In summary, it leverages OCR data and bounding boxes to prompt open-source LLMs, generating query-output tuples. The methodology draws inspiration from the paper on [Visual Instruction Tuning](https://arxiv.org/abs/2304.08485), which outlines the creation of three types of query-output tuples: 1. **Conversation Based:** Simple question-answer pairs related to the given image, covering a broad range of straightforward inquiries. Multiple conversation-based query-output tuples are generated for a single image to ensure comprehensiveness. 2. **Description:** This is not a typical question-answer pair. In this category the model generates detailed descriptions of the text depicted in the image. 3. **Complex Reasoning Based:** These questions delve deeper, requiring thoughtful consideration. Answering them involves understanding the visual content, followed by applying background knowledge or reasoning to provide a detailed response. Only one question-answer tuple of this nature is generated for each image. ## Output Parsing and Cleaning Functions Various parsers are implemented to process the model-generated output. Due to the unpredictable nature of LLM outputs, these parsers aren't flawless. However, by incorporating few-shot prompting and identifying common patterns in the LLM outputs, these parsers can handle a significant number of cases. Their primary function is to convert the raw output into a structured format for inclusion in the final database. 
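As a concrete illustration of the earlier ParseJSON step (not of these LLM-output parsers), here is a minimal sketch of such a converter. The exact field names in the annotation JSON ("text", "line_index", and a "polygon" with x0..y3 keys) are assumptions made for the example and may differ from the real GNHK layout:

```python
import json
from collections import defaultdict

def parse_annotation(json_path, ocr_out, bbox_out):
    """Convert one GNHK-style annotation JSON into the ocr_data and bbox_data
    text files described above. Field names are assumed, not taken from the docs."""
    with open(json_path) as f:
        words = json.load(f)

    # Group word texts by their line index to rebuild the OCR text line by line.
    lines = defaultdict(list)
    for w in words:
        lines[w["line_index"]].append(w["text"])
    ocr_text = "\n".join(" ".join(ws) for _, ws in sorted(lines.items()))

    # One "word: [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]" entry per word.
    bbox_lines = []
    for w in words:
        p = w["polygon"]
        corners = [[p["x0"], p["y0"]], [p["x1"], p["y1"]],
                   [p["x2"], p["y2"]], [p["x3"], p["y3"]]]
        bbox_lines.append(f'{w["text"]}: {corners}')

    with open(ocr_out, "w") as f:
        f.write(ocr_text)
    with open(bbox_out, "w") as f:
        f.write("\n".join(bbox_lines))

# Example usage (hypothetical filenames):
# parse_annotation("image_001.json", "image_001_ocr.txt", "image_001_bbox.txt")
```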
Finally, the dataset generated has the following format: ``` [{ "id": id, "Image": Image, "ocr_text": data, "bbox_data": string, "conversation": [ { "Question": question, "Answer": answer } ], "description": string, "complex_reasoning": { "Question": question, "Answer": answer } }] ``` ### Model Used After multiple experiments, the most promising results were achieved using the [Mixtral_8x7b](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) model. It demonstrated superior performance compared to Llama-2 70b for the specific task at hand. To execute these open-source models in the cloud, the services offered by Together.ai have been employed. ## Post Processing In this experiment, the output generated from two Language Models (LLMs) was processed to enhance the dataset quality. The LLMs used were [Platypus2](https://huggingface.co/garage-bAInd/Platypus2-70B-instruct) and [Mixtral_8x7b](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1). The process involved the following steps: ### Step 1: 1. **Generation and Evaluation:** Mixtral_8x7b generated the initial dataset, which was then evaluated and modified by Platypus2. Subsequently, the output from Platypus2 was further evaluated and modified by Mixtral_8x7b. ### Step 2: 2. **Judgment and Selection:** The outputs from both Mixtral_8x7b (final output of step 1) and Platypus2 (intermediate output of step 1) were assessed by [Mixtral_8x7b_Instruct](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1). The best output was selected, and the dataset was updated accordingly. The pipeline can be summarized as follows: ``` Step 1: Mixtral_8x7b generates dataset --> Platypus2 evaluates and make changes --> Mixtral_8x7b evaluates it's changes Step 2: Mixtral_8x7b output (from Step 1's evaluation stage) --> Mixtral_8x7b_Instruct | Platypus2 output (from Step 1) ``` The resulting dataset, after this process, is named `post_processed_dataset.parquet`. Please note that only 50 data points were post-processed as part of this experiment. **Note:** While this post-processing experiment aimed to enhance the dataset's overall quality, manual observations did not reveal significant improvements.
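For readers who simply want to consume the released data, a minimal loading sketch follows; it assumes the parquet configuration of this repository resolves as the single "test" split listed in the metadata:

```python
from datasets import load_dataset

# Assumes the default parquet config of this repo, exposed as a "test" split.
ds = load_dataset("shreyansh1347/GNHK-Synthetic-OCR-Dataset", split="test")

sample = ds[0]
print(sample["ocr_text"])                       # flattened OCR text of the image
print(sample["complex_reasoning"]["Question"])  # one reasoning question per image
print(sample["complex_reasoning"]["Answer"])
```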
shreyansh1347/GNHK-Synthetic-OCR-Dataset
[ "arxiv:2304.08485", "region:us" ]
2024-01-23T06:09:41+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "Image", "dtype": "Image"}, {"name": "ocr_text", "dtype": "string"}, {"name": "bbox_data", "dtype": "string"}, {"name": "conversation", "list": [{"name": "Question", "dtype": "string"}, {"name": "Answer", "dtype": "string"}]}, {"name": "description", "dtype": "string"}, {"name": "complex_reasoning", "struct": [{"name": "Question", "dtype": "string"}, {"name": "Answer", "dtype": "string"}]}]}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "dataset.parquet"}]}]}
2024-02-01T12:55:57+00:00
[ "2304.08485" ]
[]
TAGS #arxiv-2304.08485 #region-us
# GNHK Synthetic OCR Dataset ## Overview Welcome to the GNHK Synthetic OCR Dataset repository. Here I have generated synthetic data using GNHK Dataset, and Open Source LLMs like Mixtral. The dataset contains queries on the images and their answers. ## What's Inside? - Dataset Folder: The Dataset Folder contains the images, and corresponding to each image, there is a JSON file which carries the ocr information of that image - Parquet File: For easy handling and analysis, the processed dataset is saved as a Parquet file ('dataset.parquet'). This file contains images, their OCR text, one probable question per image, and its likely answer. # Methodology for Generation ## URL This Python notebook interacts with a dataset provided by GNHK, stored on Google Drive. The dataset consists of images, each accompanied by a JSON file containing OCR information for that image. The purpose of ParseJSON is to extract information from these JSON files, convert it into text files, and store these files in a folder named 'parsed_dataset' on the same Google Drive. ### What does it parse to? - ocr_data: It extracts OCR texts for words based on their 'line_index' and organizes them to represent the OCR text of the given image. - bbox_data: Another text file is generated by the parser, structuring information in this format: 'word: [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]' (where x0, y1, etc. are coordinates of bounding boxes) ### Why do we need a parser? The parser is necessary because models require OCR data and bounding boxes as input. If this information is in JSON format, creating a prompt for the models becomes complex and may lead to confusion, resulting in undesirable outputs. The parser simplifies the process by converting the data into easily understandable text files. ## 2. URL This notebook is the central tool for creating the dataset. In summary, it leverages OCR data and bounding boxes to prompt open-source LLMs, generating query-output tuples. The methodology draws inspiration from the paper on Visual Instruction Tuning, which outlines the creation of three types of query-output tuples: 1. Conversation Based: Simple question-answer pairs related to the given image, covering a broad range of straightforward inquiries. Multiple conversation-based query-output tuples are generated for a single image to ensure comprehensiveness. 2. Description: This is not a typical question-answer pair. In this category the model generates detailed descriptions of the text depicted in the image. 3. Complex Reasoning Based: These questions delve deeper, requiring thoughtful consideration. Answering them involves understanding the visual content, followed by applying background knowledge or reasoning to provide a detailed response. Only one question-answer tuple of this nature is generated for each image. ## Output Parsing and Cleaning Functions Various parsers are implemented to process the model-generated output. Due to the unpredictable nature of LLM outputs, these parsers aren't flawless. However, by incorporating few-shot prompting and identifying common patterns in the LLM outputs, these parsers can handle a significant number of cases. Their primary function is to convert the raw output into a structured format for inclusion in the final database. Finally, the dataset generated has the following format: ### Model Used After multiple experiments, the most promising results were achieved using the Mixtral_8x7b model. It demonstrated superior performance compared to Llama-2 70b for the specific task at hand. 
To execute these open-source models in the cloud, the services offered by URL have been employed. ## Post Processing In this experiment, the output generated from two Language Models (LLMs) was processed to enhance the dataset quality. The LLMs used were Platypus2 and Mixtral_8x7b. The process involved the following steps: ### Step 1: 1. Generation and Evaluation: Mixtral_8x7b generated the initial dataset, which was then evaluated and modified by Platypus2. Subsequently, the output from Platypus2 was further evaluated and modified by Mixtral_8x7b. ### Step 2: 2. Judgment and Selection: The outputs from both Mixtral_8x7b (final output of step 1) and Platypus2 (intermediate output of step 1) were assessed by Mixtral_8x7b_Instruct. The best output was selected, and the dataset was updated accordingly. The pipeline can be summarized as follows: The resulting dataset, after this process, is named 'post_processed_dataset.parquet'. Please note that only 50 data points were post-processed as part of this experiment. Note: While this post-processing experiment aimed to enhance the dataset's overall quality, manual observations did not reveal significant improvements.
[ "# GNHK Synthetic OCR Dataset", "## Overview\n\nWelcome to the GNHK Synthetic OCR Dataset repository. Here I have generated synthetic data using GNHK Dataset, and Open Source LLMs like Mixtral. The dataset contains queries on the images and their answers.", "## What's Inside?\n\n- Dataset Folder: The Dataset Folder contains the images, and corresponding to each image, there is a JSON file which carries the ocr information of that image\n\n- Parquet File: For easy handling and analysis, the processed dataset is saved as a Parquet file ('dataset.parquet'). This file contains images, their OCR text, one probable question per image, and its likely answer.", "# Methodology for Generation", "## URL\n\nThis Python notebook interacts with a dataset provided by GNHK, stored on Google Drive. The dataset consists of images, each accompanied by a JSON file containing OCR information for that image. The purpose of ParseJSON is to extract information from these JSON files, convert it into text files, and store these files in a folder named 'parsed_dataset' on the same Google Drive.", "### What does it parse to?\n\n- ocr_data: It extracts OCR texts for words based on their 'line_index' and organizes them to represent the OCR text of the given image.\n\n- bbox_data: Another text file is generated by the parser, structuring information in this format:\n'word: [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]'\n\n (where x0, y1, etc. are coordinates of bounding boxes)", "### Why do we need a parser?\nThe parser is necessary because models require OCR data and bounding boxes as input. If this information is in JSON format, creating a prompt for the models becomes complex and may lead to confusion, resulting in undesirable outputs. The parser simplifies the process by converting the data into easily understandable text files.", "## 2. URL\n\nThis notebook is the central tool for creating the dataset. In summary, it leverages OCR data and bounding boxes to prompt open-source LLMs, generating query-output tuples.\n\nThe methodology draws inspiration from the paper on Visual Instruction Tuning, which outlines the creation of three types of query-output tuples:\n\n1. Conversation Based: Simple question-answer pairs related to the given image, covering a broad range of straightforward inquiries. Multiple conversation-based query-output tuples are generated for a single image to ensure comprehensiveness.\n\n2. Description: This is not a typical question-answer pair. In this category the model generates detailed descriptions of the text depicted in the image.\n\n3. Complex Reasoning Based: These questions delve deeper, requiring thoughtful consideration. Answering them involves understanding the visual content, followed by applying background knowledge or reasoning to provide a detailed response. Only one question-answer tuple of this nature is generated for each image.", "## Output Parsing and Cleaning Functions\n\nVarious parsers are implemented to process the model-generated output. Due to the unpredictable nature of LLM outputs, these parsers aren't flawless. However, by incorporating few-shot prompting and identifying common patterns in the LLM outputs, these parsers can handle a significant number of cases. Their primary function is to convert the raw output into a structured format for inclusion in the final database.\n\n\n\nFinally, the dataset generated has the following format:", "### Model Used\n\nAfter multiple experiments, the most promising results were achieved using the Mixtral_8x7b model. 
It demonstrated superior performance compared to Llama-2 70b for the specific task at hand.\n\nTo execute these open-source models in the cloud, the services offered by URL have been employed.", "## Post Processing\n\nIn this experiment, the output generated from two Language Models (LLMs) was processed to enhance the dataset quality. The LLMs used were Platypus2 and Mixtral_8x7b. The process involved the following steps:", "### Step 1:\n1. Generation and Evaluation: Mixtral_8x7b generated the initial dataset, which was then evaluated and modified by Platypus2. Subsequently, the output from Platypus2 was further evaluated and modified by Mixtral_8x7b.", "### Step 2:\n2. Judgment and Selection: The outputs from both Mixtral_8x7b (final output of step 1) and Platypus2 (intermediate output of step 1) were assessed by Mixtral_8x7b_Instruct. The best output was selected, and the dataset was updated accordingly.\n\nThe pipeline can be summarized as follows:\n\n\nThe resulting dataset, after this process, is named 'post_processed_dataset.parquet'. Please note that only 50 data points were post-processed as part of this experiment.\n\nNote: While this post-processing experiment aimed to enhance the dataset's overall quality, manual observations did not reveal significant improvements." ]
[ "TAGS\n#arxiv-2304.08485 #region-us \n", "# GNHK Synthetic OCR Dataset", "## Overview\n\nWelcome to the GNHK Synthetic OCR Dataset repository. Here I have generated synthetic data using GNHK Dataset, and Open Source LLMs like Mixtral. The dataset contains queries on the images and their answers.", "## What's Inside?\n\n- Dataset Folder: The Dataset Folder contains the images, and corresponding to each image, there is a JSON file which carries the ocr information of that image\n\n- Parquet File: For easy handling and analysis, the processed dataset is saved as a Parquet file ('dataset.parquet'). This file contains images, their OCR text, one probable question per image, and its likely answer.", "# Methodology for Generation", "## URL\n\nThis Python notebook interacts with a dataset provided by GNHK, stored on Google Drive. The dataset consists of images, each accompanied by a JSON file containing OCR information for that image. The purpose of ParseJSON is to extract information from these JSON files, convert it into text files, and store these files in a folder named 'parsed_dataset' on the same Google Drive.", "### What does it parse to?\n\n- ocr_data: It extracts OCR texts for words based on their 'line_index' and organizes them to represent the OCR text of the given image.\n\n- bbox_data: Another text file is generated by the parser, structuring information in this format:\n'word: [[x0, y0], [x1, y1], [x2, y2], [x3, y3]]'\n\n (where x0, y1, etc. are coordinates of bounding boxes)", "### Why do we need a parser?\nThe parser is necessary because models require OCR data and bounding boxes as input. If this information is in JSON format, creating a prompt for the models becomes complex and may lead to confusion, resulting in undesirable outputs. The parser simplifies the process by converting the data into easily understandable text files.", "## 2. URL\n\nThis notebook is the central tool for creating the dataset. In summary, it leverages OCR data and bounding boxes to prompt open-source LLMs, generating query-output tuples.\n\nThe methodology draws inspiration from the paper on Visual Instruction Tuning, which outlines the creation of three types of query-output tuples:\n\n1. Conversation Based: Simple question-answer pairs related to the given image, covering a broad range of straightforward inquiries. Multiple conversation-based query-output tuples are generated for a single image to ensure comprehensiveness.\n\n2. Description: This is not a typical question-answer pair. In this category the model generates detailed descriptions of the text depicted in the image.\n\n3. Complex Reasoning Based: These questions delve deeper, requiring thoughtful consideration. Answering them involves understanding the visual content, followed by applying background knowledge or reasoning to provide a detailed response. Only one question-answer tuple of this nature is generated for each image.", "## Output Parsing and Cleaning Functions\n\nVarious parsers are implemented to process the model-generated output. Due to the unpredictable nature of LLM outputs, these parsers aren't flawless. However, by incorporating few-shot prompting and identifying common patterns in the LLM outputs, these parsers can handle a significant number of cases. 
Their primary function is to convert the raw output into a structured format for inclusion in the final database.\n\n\n\nFinally, the dataset generated has the following format:", "### Model Used\n\nAfter multiple experiments, the most promising results were achieved using the Mixtral_8x7b model. It demonstrated superior performance compared to Llama-2 70b for the specific task at hand.\n\nTo execute these open-source models in the cloud, the services offered by URL have been employed.", "## Post Processing\n\nIn this experiment, the output generated from two Language Models (LLMs) was processed to enhance the dataset quality. The LLMs used were Platypus2 and Mixtral_8x7b. The process involved the following steps:", "### Step 1:\n1. Generation and Evaluation: Mixtral_8x7b generated the initial dataset, which was then evaluated and modified by Platypus2. Subsequently, the output from Platypus2 was further evaluated and modified by Mixtral_8x7b.", "### Step 2:\n2. Judgment and Selection: The outputs from both Mixtral_8x7b (final output of step 1) and Platypus2 (intermediate output of step 1) were assessed by Mixtral_8x7b_Instruct. The best output was selected, and the dataset was updated accordingly.\n\nThe pipeline can be summarized as follows:\n\n\nThe resulting dataset, after this process, is named 'post_processed_dataset.parquet'. Please note that only 50 data points were post-processed as part of this experiment.\n\nNote: While this post-processing experiment aimed to enhance the dataset's overall quality, manual observations did not reveal significant improvements." ]
dd9a60bdea2cbfb9937ba4da97500489cd75fb14
Sri Lankan fuel prices (Ceypetco), recorded daily from 3/1/1990 to 1/1/2024, in LKR. Date: The date on which the fuel prices are recorded. LP 95: The price of unleaded petrol with an octane rating of 95. LP 92: The price of unleaded petrol with an octane rating of 92. LAD: The price of auto diesel (diesel for automobiles). LSD: The price of low sulfur diesel. LK: The price of kerosene. LIK: The price of industrial kerosene. FUR. 800: The price of furnace oil with a viscosity of 800. FUR. 1500 (High): The price of high-viscosity furnace oil with a viscosity of 1500. FUR. 1500 (Low): The price of low-viscosity furnace oil with a viscosity of 1500.
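A minimal sketch of working with the table, assuming it is distributed as a single CSV file with the column headers listed above (the actual filename, header spellings, and date format may differ):

```python
import pandas as pd

# Placeholder filename; point this at the actual file in the repository.
# Date format is assumed to be day-first, hence dayfirst=True.
prices = pd.read_csv("sl_fuel_prices.csv", parse_dates=["Date"], dayfirst=True)

# Most recently recorded prices (LKR).
latest = prices.sort_values("Date").iloc[-1]
print(latest["Date"], latest["LP 92"], latest["LAD"])
```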
ethanrom/sl_fuel_prices
[ "Fuel", "Sri Lanka", "region:us" ]
2024-01-23T06:29:41+00:00
{"tags": ["Fuel", "Sri Lanka"]}
2024-01-23T06:35:47+00:00
[]
[]
TAGS #Fuel #Sri Lanka #region-us
Sri Lankan fuel prices (Ceypetco), recorded daily from 3/1/1990 to 1/1/2024, in LKR. Date: The date on which the fuel prices are recorded. LP 95: The price of unleaded petrol with an octane rating of 95. LP 92: The price of unleaded petrol with an octane rating of 92. LAD: The price of auto diesel (diesel for automobiles). LSD: The price of low sulfur diesel. LK: The price of kerosene. LIK: The price of industrial kerosene. FUR. 800: The price of furnace oil with a viscosity of 800. FUR. 1500 (High): The price of high-viscosity furnace oil with a viscosity of 1500. FUR. 1500 (Low): The price of low-viscosity furnace oil with a viscosity of 1500.
[]
[ "TAGS\n#Fuel #Sri Lanka #region-us \n" ]
4d3b4712003faa85d666689b9aa383d93e7274f7
# OPV2V-H dataset Based on the original OPV2V dataset, we supplement each agent with 16-line and 32-line LiDAR data, as well as 4 depth cameras. Annotations are shared with the original OPV2V, so please download the original OPV2V dataset as well. 1. Depth Data `OPV2V-H-depth.zip` stores the depth camera data. You can uncompress it directly: ``` unzip OPV2V-H-depth.zip ``` 2. LiDAR Data The `OPV2V-H-LiDAR-partxx` files are split volumes of a compressed archive; they store the 16-line and 32-line LiDAR data. You can unzip them using ``` cat OPV2V-H-LiDAR-part* > OPV2V-H-LiDAR.zip unzip OPV2V-H-LiDAR.zip ``` The expected output structure is ``` ─ OPV2V_Hetero ├── test ├── train └── validate ```
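After extracting both archives, a quick sanity check can confirm the expected layout. This is a minimal Python sketch, assuming everything was unpacked into the current directory:

```python
import os

# Assumed extraction location; adjust if the archives were unpacked elsewhere.
root = "OPV2V_Hetero"

for split in ("train", "validate", "test"):
    path = os.path.join(root, split)
    status = "ok" if os.path.isdir(path) else "missing"
    print(f"{path}: {status}")
```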
yifanlu/OPV2V-H
[ "region:us" ]
2024-01-23T06:43:25+00:00
{}
2024-01-28T06:23:05+00:00
[]
[]
TAGS #region-us
# OPV2V-H dataset Based on the original OPV2V dataset, we supplement each agent with 16-line and 32-line LiDAR data, as well as 4 depth cameras. Annotations are shared with the original OPV2V, so please download the original OPV2V dataset as well. 1. Depth Data 'URL ' stores the depth camera data. You can uncompress it directly: 2. LiDAR Data The 'OPV2V-H-LiDAR-partxx' files are split volumes of a compressed archive; they store the 16-line and 32-line LiDAR data. You can unzip them using The expected output structure is
[ "# OPV2V-H dataset\n\nBased on the original OPV2V dataset, we supplemented 16-line, 32-line lidar data for each agent, as well as 4 depth cameras. Annotations will be shared with the original OPV2V, so please download the original OPV2V dataset as well.\n\n\n\n1. Depth Data\n\n 'URL ' stores the depth camera data. You can directly uncompress them:\n\n \n\n \n\n2. LiDAR Data\n\n 'OPV2V-H-LiDAR-partxx' is a volume-compressed slice. They store 16-line and 32-line LiDAR data. You can unzip them using\n\n \n\n \n\nA desired output structure will be" ]
[ "TAGS\n#region-us \n", "# OPV2V-H dataset\n\nBased on the original OPV2V dataset, we supplemented 16-line, 32-line lidar data for each agent, as well as 4 depth cameras. Annotations will be shared with the original OPV2V, so please download the original OPV2V dataset as well.\n\n\n\n1. Depth Data\n\n 'URL ' stores the depth camera data. You can directly uncompress them:\n\n \n\n \n\n2. LiDAR Data\n\n 'OPV2V-H-LiDAR-partxx' is a volume-compressed slice. They store 16-line and 32-line LiDAR data. You can unzip them using\n\n \n\n \n\nA desired output structure will be" ]
4569f50a6abc05fe122bb8a68769c964e4722716
Preprocessed DTU dataset for ICCV'21 paper [MVSDF](https://github.com/jzhangbs/MVSDF). Please download the files directly.
jzhangbs/mvsdf_dtu
[ "license:mit", "region:us" ]
2024-01-23T06:48:04+00:00
{"license": "mit"}
2024-01-23T06:50:29+00:00
[]
[]
TAGS #license-mit #region-us
Preprocessed DTU dataset for ICCV'21 paper MVSDF. Please download the files directly.
[]
[ "TAGS\n#license-mit #region-us \n" ]
87545243b64d6fe4aed1d6f33e36074e3f3d713d
Preprocessed DTU dataset for BMVC'20 paper [Vis-MVSNet](https://github.com/jzhangbs/Vis-MVSNet). Please download the files directly.
jzhangbs/vismvsnet_dtu
[ "license:mit", "region:us" ]
2024-01-23T06:50:34+00:00
{"license": "mit"}
2024-01-23T07:03:37+00:00
[]
[]
TAGS #license-mit #region-us
Preprocessed DTU dataset for BMVC'20 paper Vis-MVSNet. Please download the files directly.
[]
[ "TAGS\n#license-mit #region-us \n" ]
3ab7ff004cc7477ad296ed5282fc90b747367ca3
# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abhishekchohan/mistral-7B-forest-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T06:57:46.714861](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1/blob/main/results_2024-01-23T06-57-46.714861.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6332771095620451, "acc_stderr": 0.032195930683822954, "acc_norm": 0.6396653083892934, "acc_norm_stderr": 0.03284463429523817, "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": 0.43702115490675825, "mc2_stderr": 0.014178223334158307 }, "harness|arc:challenge|25": { "acc": 0.5631399317406144, "acc_stderr": 0.014494421584256517, "acc_norm": 0.60580204778157, "acc_norm_stderr": 0.014280522667467325 }, "harness|hellaswag|10": { "acc": 0.6274646484763992, "acc_stderr": 0.004824917516374183, "acc_norm": 0.8313085042820155, "acc_norm_stderr": 0.003737138752336941 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316091, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316091 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, 
"acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.04655010411319616, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.04655010411319616 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.025107425481137282, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137282 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.02425107126220884, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.0351760354036101, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.0351760354036101 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.029857515673386417, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.029857515673386417 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.02423353229775873, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.02423353229775873 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6461538461538462, "acc_stderr": 0.024243783994062153, "acc_norm": 0.6461538461538462, "acc_norm_stderr": 0.024243783994062153 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.02938162072646507, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.02938162072646507 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6596638655462185, "acc_stderr": 0.030778057422931673, "acc_norm": 0.6596638655462185, "acc_norm_stderr": 0.030778057422931673 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8201834862385321, "acc_stderr": 0.016465345467391545, "acc_norm": 0.8201834862385321, "acc_norm_stderr": 0.016465345467391545 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.028379449451588667, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.028379449451588667 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7721518987341772, "acc_stderr": 0.027303484599069432, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069432 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.034981493854624714, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.034981493854624714 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.036756688322331886, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077823, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077823 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8199233716475096, "acc_stderr": 0.013740797258579823, "acc_norm": 0.8199233716475096, "acc_norm_stderr": 0.013740797258579823 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7109826589595376, "acc_stderr": 0.02440517393578323, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.02440517393578323 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3195530726256983, "acc_stderr": 0.015595520294147397, "acc_norm": 0.3195530726256983, "acc_norm_stderr": 0.015595520294147397 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7483660130718954, "acc_stderr": 0.0248480182638752, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.0248480182638752 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.026082700695399665, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.026082700695399665 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7314814814814815, "acc_stderr": 0.024659685185967284, 
"acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.024659685185967284 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422466, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422466 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44784876140808344, "acc_stderr": 0.01270058240476822, "acc_norm": 0.44784876140808344, "acc_norm_stderr": 0.01270058240476822 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806304, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806304 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.02740385941078685, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.02740385941078685 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": 0.43702115490675825, "mc2_stderr": 0.014178223334158307 }, "harness|winogrande|5": { "acc": 0.7805840568271507, "acc_stderr": 0.01163126836060778 }, "harness|gsm8k|5": { "acc": 0.3555724033358605, "acc_stderr": 0.013185402252713852 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
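As a usage note for the card above: besides the per-task detail configs, the aggregated metrics can be loaded from the "results" configuration. The sketch below assumes that configuration and its "latest" split exist exactly as described in the card and its metadata; it uses only the standard `datasets` API shown earlier.

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run of this model.
# The "latest" split always points at the newest results parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1",
    "results",
    split="latest",
)
```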
open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1
[ "region:us" ]
2024-01-23T07:00:09+00:00
{"pretty_name": "Evaluation run of abhishekchohan/mistral-7B-forest-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [abhishekchohan/mistral-7B-forest-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T06:57:46.714861](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1/blob/main/results_2024-01-23T06-57-46.714861.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6332771095620451,\n \"acc_stderr\": 0.032195930683822954,\n \"acc_norm\": 0.6396653083892934,\n \"acc_norm_stderr\": 0.03284463429523817,\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": 0.43702115490675825,\n \"mc2_stderr\": 0.014178223334158307\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5631399317406144,\n \"acc_stderr\": 0.014494421584256517,\n \"acc_norm\": 0.60580204778157,\n \"acc_norm_stderr\": 0.014280522667467325\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6274646484763992,\n \"acc_stderr\": 0.004824917516374183,\n \"acc_norm\": 0.8313085042820155,\n \"acc_norm_stderr\": 0.003737138752336941\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316091,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316091\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.04655010411319616,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.04655010411319616\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137282,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137282\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.0351760354036101,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.0351760354036101\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386417,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386417\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.02423353229775873,\n \"acc_norm\": 0.8704663212435233,\n 
\"acc_norm_stderr\": 0.02423353229775873\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6461538461538462,\n \"acc_stderr\": 0.024243783994062153,\n \"acc_norm\": 0.6461538461538462,\n \"acc_norm_stderr\": 0.024243783994062153\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.02938162072646507,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.02938162072646507\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6596638655462185,\n \"acc_stderr\": 0.030778057422931673,\n \"acc_norm\": 0.6596638655462185,\n \"acc_norm_stderr\": 0.030778057422931673\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389023,\n \"acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389023\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8201834862385321,\n \"acc_stderr\": 0.016465345467391545,\n \"acc_norm\": 0.8201834862385321,\n \"acc_norm_stderr\": 0.016465345467391545\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588667,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588667\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069432,\n \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069432\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624714,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624714\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.036756688322331886,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.036756688322331886\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077823,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077823\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8199233716475096,\n \"acc_stderr\": 0.013740797258579823,\n \"acc_norm\": 0.8199233716475096,\n \"acc_norm_stderr\": 0.013740797258579823\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.02440517393578323,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.02440517393578323\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3195530726256983,\n \"acc_stderr\": 0.015595520294147397,\n \"acc_norm\": 0.3195530726256983,\n \"acc_norm_stderr\": 0.015595520294147397\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.0248480182638752,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.0248480182638752\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.026082700695399665,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.026082700695399665\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.024659685185967284,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.024659685185967284\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422466,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422466\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44784876140808344,\n \"acc_stderr\": 0.01270058240476822,\n \"acc_norm\": 0.44784876140808344,\n \"acc_norm_stderr\": 0.01270058240476822\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806304,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806304\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.02740385941078685,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.02740385941078685\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": 0.43702115490675825,\n \"mc2_stderr\": 0.014178223334158307\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7805840568271507,\n \"acc_stderr\": 0.01163126836060778\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3555724033358605,\n 
\"acc_stderr\": 0.013185402252713852\n }\n}\n```", "repo_url": "https://huggingface.co/abhishekchohan/mistral-7B-forest-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|arc:challenge|25_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|gsm8k|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hellaswag|10_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T06-57-46.714861.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T06-57-46.714861.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T06-57-46.714861.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T06-57-46.714861.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T06-57-46.714861.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T06_57_46.714861", "path": ["**/details_harness|winogrande|5_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T06-57-46.714861.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T06_57_46.714861", "path": ["results_2024-01-23T06-57-46.714861.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T06-57-46.714861.parquet"]}]}]}
2024-01-23T07:00:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-v0.1 Dataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T06:57:46.714861 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
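The card above says "you can for instance do the following" but the loading snippet was stripped in this flattened copy; the sketch below fills that gap. The repository id is assumed to follow the leaderboard's usual `details_<org>__<model>` naming and is therefore hypothetical, while the `harness_winogrande_5` config and the "latest" split are taken from this record's own config list.

```python
from datasets import load_dataset

# Assumed repository id: inferred from the leaderboard's "details_<org>__<model>"
# naming pattern; the card text itself does not spell it out.
REPO_ID = "open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-v0.1"

# "harness_winogrande_5" is one of the configs listed for this record;
# the "latest" split always points at the most recent evaluation run.
data = load_dataset(REPO_ID, "harness_winogrande_5", split="latest")
print(data)
```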
[ "# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T06:57:46.714861(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhishekchohan/mistral-7B-forest-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T06:57:46.714861(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
33d77e59b58a0367436896ef173ceb6094b207b6
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> Fragen und Antworten für die Theorieprüfung SBF Binnen Segeln. ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. 
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
sysfox/segeln_binnen
[ "license:apache-2.0", "region:us" ]
2024-01-23T07:10:12+00:00
{"license": "apache-2.0"}
2024-01-23T07:31:50+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Dataset Card for Dataset Name Fragen und Antworten für die Theorieprüfung SBF Binnen Segeln. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nFragen und Antworten für die Theorieprüfung SBF Binnen Segeln.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nFragen und Antworten für die Theorieprüfung SBF Binnen Segeln.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e579211202063982c416673b92ae5a40a5cd255f
This dataset is a Catalan version of [bitext/Bitext-customer-support-llm-chatbot-training-dataset](https://huggingface.co/datasets/bitext/Bitext-customer-support-llm-chatbot-training-dataset). The translation was made with a [Helsinki-NLP](https://huggingface.co/Helsinki-NLP) Catalan translation model.
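Below is a minimal usage sketch with the `datasets` library; the repository id and the instruction/response columns come from this record's metadata, while the interpretation of the columns (customer query vs. agent answer) is an assumption based on the customer-support framing.

```python
from datasets import load_dataset

# Load both splits of the Catalan customer-support dataset.
dataset = load_dataset("ericrisco/customer_service_chatbot_ca")

# Each row has an "instruction" and a "response" column (per the dataset metadata).
example = dataset["train"][0]
print(example["instruction"])  # customer query (assumed interpretation)
print(example["response"])     # agent answer (assumed interpretation)
```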
ericrisco/customer_service_chatbot_ca
[ "region:us" ]
2024-01-23T07:41:25+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 201229.2, "num_examples": 450}, {"name": "test", "num_bytes": 22358.8, "num_examples": 50}], "download_size": 109055, "dataset_size": 223588.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-01-25T10:43:58+00:00
[]
[]
TAGS #region-us
This dataset is a Catalan version of bitext/Bitext-customer-support-llm-chatbot-training-dataset. The translation was made with a Helsinki-NLP (URL) Catalan translation model.
[]
[ "TAGS\n#region-us \n" ]
1dbaffa3feff8af361022be3157edb268f4d65d5
# Dataset Card for Evaluation run of Epiculous/Crunchy-onion <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Epiculous/Crunchy-onion](https://huggingface.co/Epiculous/Crunchy-onion) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Epiculous__Crunchy-onion", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T07:42:32.669813](https://huggingface.co/datasets/open-llm-leaderboard/details_Epiculous__Crunchy-onion/blob/main/results_2024-01-23T07-42-32.669813.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6967211080887986, "acc_stderr": 0.030785488462122614, "acc_norm": 0.7024337864338088, "acc_norm_stderr": 0.031360406648538196, "mc1": 0.4810281517747858, "mc1_stderr": 0.017490896405762346, "mc2": 0.6387747489748049, "mc2_stderr": 0.01598416710685737 }, "harness|arc:challenge|25": { "acc": 0.6390784982935154, "acc_stderr": 0.014034761386175456, "acc_norm": 0.6715017064846417, "acc_norm_stderr": 0.013724978465537304 }, "harness|hellaswag|10": { "acc": 0.6981676956781517, "acc_stderr": 0.0045811472479632, "acc_norm": 0.8618801035650269, "acc_norm_stderr": 0.003443206472757467 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6888888888888889, "acc_stderr": 0.0399926287661772, "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.0399926287661772 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7894736842105263, "acc_stderr": 0.033176727875331574, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.033176727875331574 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7547169811320755, "acc_stderr": 0.0264803571798957, "acc_norm": 0.7547169811320755, "acc_norm_stderr": 0.0264803571798957 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7986111111111112, "acc_stderr": 0.033536474697138406, "acc_norm": 0.7986111111111112, "acc_norm_stderr": 0.033536474697138406 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7456647398843931, "acc_stderr": 0.0332055644308557, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.0332055644308557 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.049135952012744975, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.049135952012744975 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6808510638297872, "acc_stderr": 0.030472973363380042, "acc_norm": 0.6808510638297872, "acc_norm_stderr": 0.030472973363380042 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48412698412698413, "acc_stderr": 0.025738330639412152, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.025738330639412152 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8516129032258064, "acc_stderr": 0.020222737554330378, "acc_norm": 0.8516129032258064, "acc_norm_stderr": 0.020222737554330378 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5763546798029556, "acc_stderr": 0.03476725747649038, "acc_norm": 0.5763546798029556, "acc_norm_stderr": 0.03476725747649038 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9378238341968912, "acc_stderr": 0.01742697415424053, "acc_norm": 0.9378238341968912, "acc_norm_stderr": 0.01742697415424053 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7, "acc_stderr": 0.023234581088428494, "acc_norm": 0.7, "acc_norm_stderr": 0.023234581088428494 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.02950286112895529, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.02950286112895529 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7521008403361344, "acc_stderr": 0.028047967224176892, "acc_norm": 0.7521008403361344, "acc_norm_stderr": 0.028047967224176892 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, "acc_stderr": 0.04078093859163083, "acc_norm": 0.4768211920529801, 
"acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8660550458715597, "acc_stderr": 0.014602811435592635, "acc_norm": 0.8660550458715597, "acc_norm_stderr": 0.014602811435592635 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.03367462138896078, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455334, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455334 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.869198312236287, "acc_stderr": 0.02194876605947076, "acc_norm": 0.869198312236287, "acc_norm_stderr": 0.02194876605947076 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.030216831011508766, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.030216831011508766 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.03446513350752599, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.03446513350752599 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.032910995786157686, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04697113923010213, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04697113923010213 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.0349260647662379, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.0349260647662379 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.017004368568132366, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.017004368568132366 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8735632183908046, "acc_stderr": 0.011884488905895533, "acc_norm": 0.8735632183908046, "acc_norm_stderr": 0.011884488905895533 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7658959537572254, "acc_stderr": 0.02279711027807113, "acc_norm": 0.7658959537572254, "acc_norm_stderr": 0.02279711027807113 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45027932960893857, "acc_stderr": 0.016639615236845807, "acc_norm": 0.45027932960893857, "acc_norm_stderr": 0.016639615236845807 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8104575163398693, "acc_stderr": 0.022442358263336206, "acc_norm": 0.8104575163398693, "acc_norm_stderr": 0.022442358263336206 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7684887459807074, "acc_stderr": 0.023956532766639133, "acc_norm": 0.7684887459807074, "acc_norm_stderr": 0.023956532766639133 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8148148148148148, "acc_stderr": 0.0216138093952248, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.0216138093952248 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5425531914893617, 
"acc_stderr": 0.02971928127223684, "acc_norm": 0.5425531914893617, "acc_norm_stderr": 0.02971928127223684 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5430247718383312, "acc_stderr": 0.01272286950161142, "acc_norm": 0.5430247718383312, "acc_norm_stderr": 0.01272286950161142 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7757352941176471, "acc_stderr": 0.025336848563332386, "acc_norm": 0.7757352941176471, "acc_norm_stderr": 0.025336848563332386 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7271241830065359, "acc_stderr": 0.018020474148393577, "acc_norm": 0.7271241830065359, "acc_norm_stderr": 0.018020474148393577 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7755102040816326, "acc_stderr": 0.0267114305555384, "acc_norm": 0.7755102040816326, "acc_norm_stderr": 0.0267114305555384 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.02372983088101853, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.02372983088101853 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.4810281517747858, "mc1_stderr": 0.017490896405762346, "mc2": 0.6387747489748049, "mc2_stderr": 0.01598416710685737 }, "harness|winogrande|5": { "acc": 0.7324388318863457, "acc_stderr": 0.01244171845689301 }, "harness|gsm8k|5": { "acc": 0.5200909780136467, "acc_stderr": 0.013761361772989016 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
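As a small follow-up to the loading snippet in the card above, the aggregated metrics can be pulled from the "results" configuration in the same way; this is a sketch based on the config names listed for this record, not an additional official example.

```python
from datasets import load_dataset

# The "results" config stores the aggregated metrics of each run;
# the "latest" split points at the most recent evaluation (2024-01-23 here).
results = load_dataset(
    "open-llm-leaderboard/details_Epiculous__Crunchy-onion",
    "results",
    split="latest",
)
print(results[0])  # row with the aggregated acc / acc_stderr style fields
```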
open-llm-leaderboard/details_Epiculous__Crunchy-onion
[ "region:us" ]
2024-01-23T07:44:50+00:00
{"pretty_name": "Evaluation run of Epiculous/Crunchy-onion", "dataset_summary": "Dataset automatically created during the evaluation run of model [Epiculous/Crunchy-onion](https://huggingface.co/Epiculous/Crunchy-onion) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Epiculous__Crunchy-onion\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T07:42:32.669813](https://huggingface.co/datasets/open-llm-leaderboard/details_Epiculous__Crunchy-onion/blob/main/results_2024-01-23T07-42-32.669813.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6967211080887986,\n \"acc_stderr\": 0.030785488462122614,\n \"acc_norm\": 0.7024337864338088,\n \"acc_norm_stderr\": 0.031360406648538196,\n \"mc1\": 0.4810281517747858,\n \"mc1_stderr\": 0.017490896405762346,\n \"mc2\": 0.6387747489748049,\n \"mc2_stderr\": 0.01598416710685737\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6390784982935154,\n \"acc_stderr\": 0.014034761386175456,\n \"acc_norm\": 0.6715017064846417,\n \"acc_norm_stderr\": 0.013724978465537304\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6981676956781517,\n \"acc_stderr\": 0.0045811472479632,\n \"acc_norm\": 0.8618801035650269,\n \"acc_norm_stderr\": 0.003443206472757467\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6888888888888889,\n \"acc_stderr\": 0.0399926287661772,\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.0399926287661772\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.033176727875331574,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.033176727875331574\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7547169811320755,\n \"acc_stderr\": 0.0264803571798957,\n \"acc_norm\": 0.7547169811320755,\n \"acc_norm_stderr\": 0.0264803571798957\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7986111111111112,\n \"acc_stderr\": 0.033536474697138406,\n \"acc_norm\": 0.7986111111111112,\n \"acc_norm_stderr\": 0.033536474697138406\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n 
\"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.0332055644308557,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.0332055644308557\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.049135952012744975,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.049135952012744975\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6808510638297872,\n \"acc_stderr\": 0.030472973363380042,\n \"acc_norm\": 0.6808510638297872,\n \"acc_norm_stderr\": 0.030472973363380042\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.025738330639412152,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.025738330639412152\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8516129032258064,\n \"acc_stderr\": 0.020222737554330378,\n \"acc_norm\": 0.8516129032258064,\n \"acc_norm_stderr\": 0.020222737554330378\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5763546798029556,\n \"acc_stderr\": 0.03476725747649038,\n \"acc_norm\": 0.5763546798029556,\n \"acc_norm_stderr\": 0.03476725747649038\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9378238341968912,\n \"acc_stderr\": 0.01742697415424053,\n \"acc_norm\": 0.9378238341968912,\n \"acc_norm_stderr\": 0.01742697415424053\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 
0.023234581088428494,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.023234581088428494\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.02950286112895529,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.02950286112895529\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7521008403361344,\n \"acc_stderr\": 0.028047967224176892,\n \"acc_norm\": 0.7521008403361344,\n \"acc_norm_stderr\": 0.028047967224176892\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8660550458715597,\n \"acc_stderr\": 0.014602811435592635,\n \"acc_norm\": 0.8660550458715597,\n \"acc_norm_stderr\": 0.014602811435592635\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.03367462138896078,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.03367462138896078\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455334,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455334\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.869198312236287,\n \"acc_stderr\": 0.02194876605947076,\n \"acc_norm\": 0.869198312236287,\n \"acc_norm_stderr\": 0.02194876605947076\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.030216831011508766,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.030216831011508766\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752599,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752599\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.0349260647662379,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.0349260647662379\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.017004368568132366,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.017004368568132366\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8735632183908046,\n \"acc_stderr\": 0.011884488905895533,\n \"acc_norm\": 0.8735632183908046,\n \"acc_norm_stderr\": 
0.011884488905895533\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7658959537572254,\n \"acc_stderr\": 0.02279711027807113,\n \"acc_norm\": 0.7658959537572254,\n \"acc_norm_stderr\": 0.02279711027807113\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45027932960893857,\n \"acc_stderr\": 0.016639615236845807,\n \"acc_norm\": 0.45027932960893857,\n \"acc_norm_stderr\": 0.016639615236845807\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8104575163398693,\n \"acc_stderr\": 0.022442358263336206,\n \"acc_norm\": 0.8104575163398693,\n \"acc_norm_stderr\": 0.022442358263336206\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7684887459807074,\n \"acc_stderr\": 0.023956532766639133,\n \"acc_norm\": 0.7684887459807074,\n \"acc_norm_stderr\": 0.023956532766639133\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.0216138093952248,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.0216138093952248\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5425531914893617,\n \"acc_stderr\": 0.02971928127223684,\n \"acc_norm\": 0.5425531914893617,\n \"acc_norm_stderr\": 0.02971928127223684\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5430247718383312,\n \"acc_stderr\": 0.01272286950161142,\n \"acc_norm\": 0.5430247718383312,\n \"acc_norm_stderr\": 0.01272286950161142\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7757352941176471,\n \"acc_stderr\": 0.025336848563332386,\n \"acc_norm\": 0.7757352941176471,\n \"acc_norm_stderr\": 0.025336848563332386\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7271241830065359,\n \"acc_stderr\": 0.018020474148393577,\n \"acc_norm\": 0.7271241830065359,\n \"acc_norm_stderr\": 0.018020474148393577\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7755102040816326,\n \"acc_stderr\": 0.0267114305555384,\n \"acc_norm\": 0.7755102040816326,\n \"acc_norm_stderr\": 0.0267114305555384\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n \"acc_stderr\": 0.02372983088101853,\n \"acc_norm\": 0.8706467661691543,\n \"acc_norm_stderr\": 0.02372983088101853\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4810281517747858,\n \"mc1_stderr\": 0.017490896405762346,\n \"mc2\": 0.6387747489748049,\n \"mc2_stderr\": 0.01598416710685737\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7324388318863457,\n \"acc_stderr\": 0.01244171845689301\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5200909780136467,\n \"acc_stderr\": 0.013761361772989016\n }\n}\n```", "repo_url": "https://huggingface.co/Epiculous/Crunchy-onion", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|arc:challenge|25_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|gsm8k|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hellaswag|10_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-42-32.669813.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-42-32.669813.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-42-32.669813.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T07-42-32.669813.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-42-32.669813.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-42-32.669813.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["**/details_harness|winogrande|5_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T07-42-32.669813.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T07_42_32.669813", "path": ["results_2024-01-23T07-42-32.669813.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T07-42-32.669813.parquet"]}]}]}
2024-01-23T07:45:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Epiculous/Crunchy-onion Dataset automatically created during the evaluation run of model Epiculous/Crunchy-onion on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T07:42:32.669813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Epiculous/Crunchy-onion\n\n\n\nDataset automatically created during the evaluation run of model Epiculous/Crunchy-onion on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T07:42:32.669813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Epiculous/Crunchy-onion\n\n\n\nDataset automatically created during the evaluation run of model Epiculous/Crunchy-onion on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T07:42:32.669813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
916bd78268978049292b5bd4ebd72a0f7c9b8ecf
# Dataset Card for Evaluation run of flemmingmiguel/MBX-7B-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [flemmingmiguel/MBX-7B-v2](https://huggingface.co/flemmingmiguel/MBX-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_flemmingmiguel__MBX-7B-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-29T01:58:59.417725](https://huggingface.co/datasets/open-llm-leaderboard/details_flemmingmiguel__MBX-7B-v2/blob/main/results_2024-01-29T01-58-59.417725.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6539700518752208, "acc_stderr": 0.03202984813423276, "acc_norm": 0.6533129460161269, "acc_norm_stderr": 0.0326981687674504, "mc1": 0.5679314565483476, "mc1_stderr": 0.01734120239498833, "mc2": 0.7021318083151993, "mc2_stderr": 0.014958406447536246 }, "harness|arc:challenge|25": { "acc": 0.7056313993174061, "acc_stderr": 0.01331852846053942, "acc_norm": 0.735494880546075, "acc_norm_stderr": 0.012889272949313368 }, "harness|hellaswag|10": { "acc": 0.717486556462856, "acc_stderr": 0.004493015945599716, "acc_norm": 0.8849830711013742, "acc_norm_stderr": 0.0031839033919416975 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7283018867924528, "acc_stderr": 0.027377706624670713, "acc_norm": 0.7283018867924528, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.048786087144669955, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.048786087144669955 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4126984126984127, "acc_stderr": 0.025355741263055273, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.025355741263055273 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356853, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473082, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473082 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660834, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660834 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156861, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156861 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290913, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290913 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092375, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092375 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8326947637292464, "acc_stderr": 0.013347327202920332, "acc_norm": 0.8326947637292464, "acc_norm_stderr": 0.013347327202920332 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.023532925431044287, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.023532925431044287 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4346368715083799, "acc_stderr": 0.016578997435496713, "acc_norm": 0.4346368715083799, "acc_norm_stderr": 0.016578997435496713 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.02545775669666788, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7561728395061729, "acc_stderr": 0.023891879541959607, "acc_norm": 0.7561728395061729, "acc_norm_stderr": 0.023891879541959607 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.012748238397365549, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.012748238397365549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.684640522875817, "acc_stderr": 0.018798086284886887, "acc_norm": 0.684640522875817, "acc_norm_stderr": 0.018798086284886887 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.02812342933514278, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5679314565483476, "mc1_stderr": 0.01734120239498833, "mc2": 0.7021318083151993, "mc2_stderr": 0.014958406447536246 }, "harness|winogrande|5": { "acc": 0.8389897395422258, "acc_stderr": 0.010329712832785725 }, "harness|gsm8k|5": { "acc": 0.7050796057619408, "acc_stderr": 0.012560698010954767 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_flemmingmiguel__MBX-7B-v2
[ "region:us" ]
2024-01-23T07:47:17+00:00
{"pretty_name": "Evaluation run of flemmingmiguel/MBX-7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [flemmingmiguel/MBX-7B-v2](https://huggingface.co/flemmingmiguel/MBX-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_flemmingmiguel__MBX-7B-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-29T01:58:59.417725](https://huggingface.co/datasets/open-llm-leaderboard/details_flemmingmiguel__MBX-7B-v2/blob/main/results_2024-01-29T01-58-59.417725.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6539700518752208,\n \"acc_stderr\": 0.03202984813423276,\n \"acc_norm\": 0.6533129460161269,\n \"acc_norm_stderr\": 0.0326981687674504,\n \"mc1\": 0.5679314565483476,\n \"mc1_stderr\": 0.01734120239498833,\n \"mc2\": 0.7021318083151993,\n \"mc2_stderr\": 0.014958406447536246\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7056313993174061,\n \"acc_stderr\": 0.01331852846053942,\n \"acc_norm\": 0.735494880546075,\n \"acc_norm_stderr\": 0.012889272949313368\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.717486556462856,\n \"acc_stderr\": 0.004493015945599716,\n \"acc_norm\": 0.8849830711013742,\n \"acc_norm_stderr\": 0.0031839033919416975\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7283018867924528,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.7283018867924528,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.048786087144669955,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.048786087144669955\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.025355741263055273,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.025355741263055273\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473082,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473082\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660834,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660834\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156861,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156861\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8326947637292464,\n \"acc_stderr\": 0.013347327202920332,\n \"acc_norm\": 0.8326947637292464,\n \"acc_norm_stderr\": 0.013347327202920332\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044287,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044287\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4346368715083799,\n \"acc_stderr\": 0.016578997435496713,\n \"acc_norm\": 0.4346368715083799,\n \"acc_norm_stderr\": 0.016578997435496713\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7561728395061729,\n \"acc_stderr\": 0.023891879541959607,\n \"acc_norm\": 0.7561728395061729,\n \"acc_norm_stderr\": 0.023891879541959607\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.684640522875817,\n \"acc_stderr\": 0.018798086284886887,\n \"acc_norm\": 0.684640522875817,\n \"acc_norm_stderr\": 0.018798086284886887\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5679314565483476,\n \"mc1_stderr\": 0.01734120239498833,\n \"mc2\": 0.7021318083151993,\n \"mc2_stderr\": 0.014958406447536246\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8389897395422258,\n \"acc_stderr\": 0.010329712832785725\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7050796057619408,\n \"acc_stderr\": 0.012560698010954767\n 
}\n}\n```", "repo_url": "https://huggingface.co/flemmingmiguel/MBX-7B-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|arc:challenge|25_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|arc:challenge|25_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|arc:challenge|25_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|arc:challenge|25_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|arc:challenge|25_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|gsm8k|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|gsm8k|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|gsm8k|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|gsm8k|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|gsm8k|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hellaswag|10_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hellaswag|10_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hellaswag|10_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hellaswag|10_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hellaswag|10_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-44-55.248174.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-44-55.248174.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T07-44-55.248174.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-17-28.506209.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T18-17-28.506209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T23-42-41.090832.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T23-42-41.090832.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T23-42-41.090832.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-20-52.562143.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-20-52.562143.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T22-47-13.183013.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T22-47-13.183013.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T22-47-13.183013.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-29T01-58-59.417725.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-29T01-58-59.417725.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-29T01-58-59.417725.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-29T01-58-59.417725.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": 
"2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": 
"2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": 
["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", 
"path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": 
"2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": 
"2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": 
["**/details_harness|hendrycksTest-management|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": 
["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": 
"2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": 
["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["**/details_harness|winogrande|5_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["**/details_harness|winogrande|5_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["**/details_harness|winogrande|5_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["**/details_harness|winogrande|5_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["**/details_harness|winogrande|5_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": ["**/details_harness|winogrande|5_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-29T01-58-59.417725.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T07_44_55.248174", "path": ["results_2024-01-23T07-44-55.248174.parquet"]}, {"split": "2024_01_23T18_17_28.506209", "path": ["results_2024-01-23T18-17-28.506209.parquet"]}, {"split": "2024_01_23T23_42_41.090832", "path": ["results_2024-01-23T23-42-41.090832.parquet"]}, {"split": "2024_01_25T06_20_52.562143", "path": ["results_2024-01-25T06-20-52.562143.parquet"]}, {"split": "2024_01_25T22_47_13.183013", "path": ["results_2024-01-25T22-47-13.183013.parquet"]}, {"split": "2024_01_29T01_58_59.417725", "path": 
["results_2024-01-29T01-58-59.417725.parquet"]}, {"split": "latest", "path": ["results_2024-01-29T01-58-59.417725.parquet"]}]}]}
2024-01-29T02:01:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of flemmingmiguel/MBX-7B-v2 Dataset automatically created during the evaluation run of model flemmingmiguel/MBX-7B-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-29T01:58:59.417725 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of flemmingmiguel/MBX-7B-v2\n\n\n\nDataset automatically created during the evaluation run of model flemmingmiguel/MBX-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-29T01:58:59.417725(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of flemmingmiguel/MBX-7B-v2\n\n\n\nDataset automatically created during the evaluation run of model flemmingmiguel/MBX-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 6 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-29T01:58:59.417725(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
f5799065fbc799d0753277f51763e1b3c51c26ce
# Dataset Card for Evaluation run of jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B](https://huggingface.co/jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T07:53:57.376778](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B/blob/main/results_2024-01-23T07-53-57.376778.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.655303418618015, "acc_stderr": 0.03206026718388969, "acc_norm": 0.6549524807930509, "acc_norm_stderr": 0.03272590499883366, "mc1": 0.5165238678090576, "mc1_stderr": 0.01749394019005772, "mc2": 0.6596210038626965, "mc2_stderr": 0.015322599620782891 }, "harness|arc:challenge|25": { "acc": 0.6945392491467577, "acc_stderr": 0.013460080478002508, "acc_norm": 0.7192832764505119, "acc_norm_stderr": 0.01313123812697557 }, "harness|hellaswag|10": { "acc": 0.7074287990440151, "acc_stderr": 0.0045401340050603214, "acc_norm": 0.880601473809998, "acc_norm_stderr": 0.003235941810943157 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996792, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996792 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, 
"acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859375, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859375 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083522, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083522 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8131313131313131, "acc_stderr": 0.027772533334218967, "acc_norm": 0.8131313131313131, "acc_norm_stderr": 0.027772533334218967 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.917098445595855, "acc_stderr": 0.01989934131572178, "acc_norm": 0.917098445595855, "acc_norm_stderr": 0.01989934131572178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465066, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465066 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.0302839955258844, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.0302839955258844 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.0257449025322909, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.0257449025322909 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094632, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094632 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179326, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179326 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.01354741565866226, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.01354741565866226 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4301675977653631, "acc_stderr": 0.016558601636041035, "acc_norm": 0.4301675977653631, "acc_norm_stderr": 0.016558601636041035 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7685185185185185, "acc_stderr": 0.023468429832451166, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.023468429832451166 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4667535853976532, "acc_stderr": 0.012741974333897227, "acc_norm": 0.4667535853976532, "acc_norm_stderr": 0.012741974333897227 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396553, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396553 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6650326797385621, "acc_stderr": 0.019094228167000325, "acc_norm": 0.6650326797385621, "acc_norm_stderr": 0.019094228167000325 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827044, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827044 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5165238678090576, "mc1_stderr": 0.01749394019005772, "mc2": 0.6596210038626965, "mc2_stderr": 0.015322599620782891 }, "harness|winogrande|5": { "acc": 0.8279400157853196, "acc_stderr": 0.010607731615247022 }, "harness|gsm8k|5": { "acc": 0.7012888551933283, "acc_stderr": 0.012607137125693635 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B
[ "region:us" ]
2024-01-23T07:56:17+00:00
{"pretty_name": "Evaluation run of jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B](https://huggingface.co/jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T07:53:57.376778](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B/blob/main/results_2024-01-23T07-53-57.376778.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.655303418618015,\n \"acc_stderr\": 0.03206026718388969,\n \"acc_norm\": 0.6549524807930509,\n \"acc_norm_stderr\": 0.03272590499883366,\n \"mc1\": 0.5165238678090576,\n \"mc1_stderr\": 0.01749394019005772,\n \"mc2\": 0.6596210038626965,\n \"mc2_stderr\": 0.015322599620782891\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6945392491467577,\n \"acc_stderr\": 0.013460080478002508,\n \"acc_norm\": 0.7192832764505119,\n \"acc_norm_stderr\": 0.01313123812697557\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7074287990440151,\n \"acc_stderr\": 0.0045401340050603214,\n \"acc_norm\": 0.880601473809998,\n \"acc_norm_stderr\": 0.003235941810943157\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996792,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996792\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083522,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083522\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8131313131313131,\n \"acc_stderr\": 0.027772533334218967,\n \"acc_norm\": 0.8131313131313131,\n \"acc_norm_stderr\": 0.027772533334218967\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.917098445595855,\n \"acc_stderr\": 0.01989934131572178,\n \"acc_norm\": 0.917098445595855,\n \"acc_norm_stderr\": 0.01989934131572178\n 
},\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465066,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465066\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.0302839955258844,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.0302839955258844\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.0257449025322909,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.0257449025322909\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179326,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179326\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.01354741565866226,\n 
\"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4301675977653631,\n \"acc_stderr\": 0.016558601636041035,\n \"acc_norm\": 0.4301675977653631,\n \"acc_norm_stderr\": 0.016558601636041035\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.023468429832451166,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.023468429832451166\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4667535853976532,\n \"acc_stderr\": 0.012741974333897227,\n \"acc_norm\": 0.4667535853976532,\n \"acc_norm_stderr\": 0.012741974333897227\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396553,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396553\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000325,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000325\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827044,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827044\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5165238678090576,\n \"mc1_stderr\": 0.01749394019005772,\n \"mc2\": 0.6596210038626965,\n \"mc2_stderr\": 0.015322599620782891\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8279400157853196,\n \"acc_stderr\": 0.010607731615247022\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7012888551933283,\n \"acc_stderr\": 0.012607137125693635\n }\n}\n```", "repo_url": 
"https://huggingface.co/jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|arc:challenge|25_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|gsm8k|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hellaswag|10_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-53-57.376778.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-53-57.376778.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-53-57.376778.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T07-53-57.376778.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-53-57.376778.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T07_53_57.376778", "path": ["**/details_harness|winogrande|5_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T07-53-57.376778.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T07_53_57.376778", "path": ["results_2024-01-23T07-53-57.376778.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T07-53-57.376778.parquet"]}]}]}
2024-01-23T07:56:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B Dataset automatically created during the evaluation run of model jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch below): ## Latest results These are the latest results from run 2024-01-23T07:53:57.376778 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
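Since this flattened rendering drops the code block that originally followed "To load the details from a run, you can for instance do the following:", a minimal sketch is reproduced here. The repository id and the `harness_winogrande_5` config name are taken from this record's metadata; any of the 63 listed configs can be substituted.

```python
from datasets import load_dataset

# Load the detailed results for one evaluation config of this run.
# "harness_winogrande_5" is one of the configs listed in this record's metadata;
# per the card, the "train" split always points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_jsfs11__WestOrcaNeuralMarco-DPO-v2-DARETIES-7B",
    "harness_winogrande_5",
    split="train",
)
```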
[ "# Dataset Card for Evaluation run of jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T07:53:57.376778(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/WestOrcaNeuralMarco-DPO-v2-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T07:53:57.376778(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
87e059428acc8d33bf3fe939fa29d8ca48d36694
# indosum INDOSUM is a new benchmark dataset for Indonesian text summarization. The dataset consists of news articles and manually constructed summaries. ## Dataset Usage Run `pip install nusacrowd` before loading the dataset through HuggingFace's `load_dataset`. ## Citation ``` @INPROCEEDINGS{8629109, author={Kurniawan, Kemal and Louvan, Samuel}, booktitle={2018 International Conference on Asian Language Processing (IALP)}, title={Indosum: A New Benchmark Dataset for Indonesian Text Summarization}, year={2018}, volume={}, number={}, pages={215-220}, doi={10.1109/IALP.2018.8629109}} ``` ## License Apache License, Version 2.0 ## Homepage [https://github.com/kata-ai/indosum](https://github.com/kata-ai/indosum) ### NusaCatalogue For easy indexing and metadata: [https://indonlp.github.io/nusa-catalogue](https://indonlp.github.io/nusa-catalogue)
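A minimal loading sketch to accompany the usage note above. The repository id `maryantocinn/indosum` is taken from this record; the available configs and splits are an assumption here and should be checked against the NusaCrowd catalogue.

```python
# pip install nusacrowd datasets
from datasets import load_dataset

# Hypothetical sketch: load the dataset from the Hugging Face Hub by the repo id
# given in this record; config and split names depend on the published loader.
indosum = load_dataset("maryantocinn/indosum")
print(indosum)
```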
maryantocinn/indosum
[ "language:ind", "summarization", "region:us" ]
2024-01-23T07:57:06+00:00
{"language": ["ind"], "tags": ["summarization"]}
2024-02-13T08:55:39+00:00
[]
[ "ind" ]
TAGS #language-Indonesian #summarization #region-us
# indosum INDOSUM is a new benchmark dataset for Indonesian text summarization. The dataset consists of news articles and manually constructed summaries. ## Dataset Usage Run 'pip install nusacrowd' before loading the dataset through HuggingFace's 'load_dataset'. ## License Apache License, Version 2.0 ## Homepage URL ### NusaCatalogue For easy indexing and metadata: URL
[ "# indosum\n\nINDOSUM is a new benchmark dataset for Indonesian text summarization. \n\nThe dataset consists of news articles and manually constructed summaries.", "## Dataset Usage\n\nRun 'pip install nusacrowd' before loading the dataset through HuggingFace's 'load_dataset'.", "## License\n\nApache License, Version 2.0", "## Homepage\n\nURL", "### NusaCatalogue\n\nFor easy indexing and metadata: URL" ]
[ "TAGS\n#language-Indonesian #summarization #region-us \n", "# indosum\n\nINDOSUM is a new benchmark dataset for Indonesian text summarization. \n\nThe dataset consists of news articles and manually constructed summaries.", "## Dataset Usage\n\nRun 'pip install nusacrowd' before loading the dataset through HuggingFace's 'load_dataset'.", "## License\n\nApache License, Version 2.0", "## Homepage\n\nURL", "### NusaCatalogue\n\nFor easy indexing and metadata: URL" ]
38af5a4d57ebad2517d9dd66cd3adb1554ef36dc
# Dataset Card for "quirky_modularaddition_increment0" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
zhiwei555/simplemodaddition
[ "region:us" ]
2024-01-23T07:59:59+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "train*"}, {"split": "validation", "path": "eval*"}, {"split": "full", "path": "full*"}]}]}
2024-01-23T08:21:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "quirky_modularaddition_increment0" More Information needed
[ "# Dataset Card for \"quirky_modularaddition_increment0\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"quirky_modularaddition_increment0\"\n\nMore Information needed" ]
436c3a883544742e23799e271d5bd4455e1d7d89
111
lcm-org/org-data
[ "size_categories:1B<n<10B", "license:apache-2.0", "region:us" ]
2024-01-23T08:20:38+00:00
{"license": "apache-2.0", "size_categories": ["1B<n<10B"]}
2024-01-23T08:27:11+00:00
[]
[]
TAGS #size_categories-1B<n<10B #license-apache-2.0 #region-us
111
[]
[ "TAGS\n#size_categories-1B<n<10B #license-apache-2.0 #region-us \n" ]
e8b0102d81beb4d7a93f0883c19609e6a4adb08c
# Dataset Card for Evaluation run of cloudyu/Yi-34Bx2-MoE-60B-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cloudyu/Yi-34Bx2-MoE-60B-DPO](https://huggingface.co/cloudyu/Yi-34Bx2-MoE-60B-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T09:26:46.662482](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO/blob/main/results_2024-01-23T09-26-46.662482.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7706672409381974, "acc_stderr": 0.027896926086644222, "acc_norm": 0.7738601958843111, "acc_norm_stderr": 0.028438404294113005, "mc1": 0.4883720930232558, "mc1_stderr": 0.017498767175740088, "mc2": 0.6624336903360023, "mc2_stderr": 0.0145357390643212 }, "harness|arc:challenge|25": { "acc": 0.6749146757679181, "acc_stderr": 0.013688147309729124, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266129 }, "harness|hellaswag|10": { "acc": 0.6546504680342561, "acc_stderr": 0.004745103543901293, "acc_norm": 0.8510256920932086, "acc_norm_stderr": 0.0035533545281323554 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7333333333333333, "acc_stderr": 0.038201699145179055, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.038201699145179055 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8, "acc_stderr": 0.02461829819586651, "acc_norm": 0.8, "acc_norm_stderr": 0.02461829819586651 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.875, "acc_stderr": 0.02765610492929436, "acc_norm": 0.875, "acc_norm_stderr": 0.02765610492929436 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252606, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, 
"acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7630057803468208, "acc_stderr": 0.03242414757483098, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.03242414757483098 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.6274509803921569, "acc_stderr": 0.048108401480826346, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7957446808510639, "acc_stderr": 0.026355158413349414, "acc_norm": 0.7957446808510639, "acc_norm_stderr": 0.026355158413349414 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6140350877192983, "acc_stderr": 0.04579639422070434, "acc_norm": 0.6140350877192983, "acc_norm_stderr": 0.04579639422070434 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7586206896551724, "acc_stderr": 0.03565998174135302, "acc_norm": 0.7586206896551724, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7275132275132276, "acc_stderr": 0.022930973071633363, "acc_norm": 0.7275132275132276, "acc_norm_stderr": 0.022930973071633363 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04426266681379909, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9032258064516129, "acc_stderr": 0.016818943416345197, "acc_norm": 0.9032258064516129, "acc_norm_stderr": 0.016818943416345197 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.625615763546798, "acc_stderr": 0.03405155380561952, "acc_norm": 0.625615763546798, "acc_norm_stderr": 0.03405155380561952 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.82, "acc_stderr": 0.03861229196653694, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8666666666666667, "acc_stderr": 0.026544435312706463, "acc_norm": 0.8666666666666667, "acc_norm_stderr": 0.026544435312706463 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9292929292929293, "acc_stderr": 0.018263105420199505, "acc_norm": 0.9292929292929293, "acc_norm_stderr": 0.018263105420199505 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9689119170984456, "acc_stderr": 0.012525310625527033, "acc_norm": 0.9689119170984456, "acc_norm_stderr": 0.012525310625527033 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8205128205128205, "acc_stderr": 0.0194573907876818, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.0194573907876818 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.44074074074074077, "acc_stderr": 0.030270671157284067, "acc_norm": 0.44074074074074077, "acc_norm_stderr": 0.030270671157284067 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8529411764705882, "acc_stderr": 0.023005459446673947, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.023005459446673947 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5099337748344371, "acc_stderr": 0.04081677107248437, "acc_norm": 0.5099337748344371, "acc_norm_stderr": 
0.04081677107248437 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9174311926605505, "acc_stderr": 0.011800361363016576, "acc_norm": 0.9174311926605505, "acc_norm_stderr": 0.011800361363016576 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6712962962962963, "acc_stderr": 0.032036140846700596, "acc_norm": 0.6712962962962963, "acc_norm_stderr": 0.032036140846700596 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9215686274509803, "acc_stderr": 0.018869514646658935, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.018869514646658935 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8987341772151899, "acc_stderr": 0.019637720526065522, "acc_norm": 0.8987341772151899, "acc_norm_stderr": 0.019637720526065522 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7937219730941704, "acc_stderr": 0.02715715047956382, "acc_norm": 0.7937219730941704, "acc_norm_stderr": 0.02715715047956382 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.9083969465648855, "acc_stderr": 0.025300035578642962, "acc_norm": 0.9083969465648855, "acc_norm_stderr": 0.025300035578642962 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9008264462809917, "acc_stderr": 0.027285246312758957, "acc_norm": 0.9008264462809917, "acc_norm_stderr": 0.027285246312758957 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8796296296296297, "acc_stderr": 0.031457038543062504, "acc_norm": 0.8796296296296297, "acc_norm_stderr": 0.031457038543062504 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8773006134969326, "acc_stderr": 0.025777328426978927, "acc_norm": 0.8773006134969326, "acc_norm_stderr": 0.025777328426978927 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6339285714285714, "acc_stderr": 0.04572372358737431, "acc_norm": 0.6339285714285714, "acc_norm_stderr": 0.04572372358737431 }, "harness|hendrycksTest-management|5": { "acc": 0.912621359223301, "acc_stderr": 0.027960689125970654, "acc_norm": 0.912621359223301, "acc_norm_stderr": 0.027960689125970654 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9444444444444444, "acc_stderr": 0.015006312806446912, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.015006312806446912 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.9, "acc_stderr": 0.030151134457776348, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776348 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9080459770114943, "acc_stderr": 0.010333225570778518, "acc_norm": 0.9080459770114943, "acc_norm_stderr": 0.010333225570778518 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8294797687861272, "acc_stderr": 0.020247961569303728, "acc_norm": 0.8294797687861272, "acc_norm_stderr": 0.020247961569303728 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8111731843575419, "acc_stderr": 0.013089403869745457, "acc_norm": 0.8111731843575419, "acc_norm_stderr": 0.013089403869745457 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8594771241830066, "acc_stderr": 0.019899435463539946, "acc_norm": 0.8594771241830066, "acc_norm_stderr": 0.019899435463539946 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8167202572347267, "acc_stderr": 0.02197419884826582, "acc_norm": 0.8167202572347267, "acc_norm_stderr": 0.02197419884826582 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8765432098765432, "acc_stderr": 0.01830386880689179, "acc_norm": 0.8765432098765432, "acc_norm_stderr": 0.01830386880689179 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6382978723404256, "acc_stderr": 
0.028663820147199485, "acc_norm": 0.6382978723404256, "acc_norm_stderr": 0.028663820147199485 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.6023468057366362, "acc_stderr": 0.012499840347460642, "acc_norm": 0.6023468057366362, "acc_norm_stderr": 0.012499840347460642 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8272058823529411, "acc_stderr": 0.022966067585581774, "acc_norm": 0.8272058823529411, "acc_norm_stderr": 0.022966067585581774 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8251633986928104, "acc_stderr": 0.015366167064780641, "acc_norm": 0.8251633986928104, "acc_norm_stderr": 0.015366167064780641 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8489795918367347, "acc_stderr": 0.02292300409473685, "acc_norm": 0.8489795918367347, "acc_norm_stderr": 0.02292300409473685 }, "harness|hendrycksTest-sociology|5": { "acc": 0.900497512437811, "acc_stderr": 0.021166216304659393, "acc_norm": 0.900497512437811, "acc_norm_stderr": 0.021166216304659393 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8830409356725146, "acc_stderr": 0.02464806896136616, "acc_norm": 0.8830409356725146, "acc_norm_stderr": 0.02464806896136616 }, "harness|truthfulqa:mc|0": { "mc1": 0.4883720930232558, "mc1_stderr": 0.017498767175740088, "mc2": 0.6624336903360023, "mc2_stderr": 0.0145357390643212 }, "harness|winogrande|5": { "acc": 0.8476716653512234, "acc_stderr": 0.010099208246065588 }, "harness|gsm8k|5": { "acc": 0.7391963608794542, "acc_stderr": 0.01209425241733274 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
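As a companion to the per-task loading snippet in the card above, a hedged sketch for pulling the aggregated scores of this run: the `results` config and the `latest` split names are taken from this record's file listing, but the column layout of the returned rows is an assumption and may vary between leaderboard versions.

```python
# Hedged sketch: loads the aggregated "results" configuration of this evaluation run.
# The config name "results" and the split "latest" appear in this record's data_files
# listing; the exact schema of the rows is not documented here.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO",
    "results",
    split="latest",
)
print(results)
```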
open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO
[ "region:us" ]
2024-01-23T09:29:03+00:00
{"pretty_name": "Evaluation run of cloudyu/Yi-34Bx2-MoE-60B-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [cloudyu/Yi-34Bx2-MoE-60B-DPO](https://huggingface.co/cloudyu/Yi-34Bx2-MoE-60B-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T09:26:46.662482](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO/blob/main/results_2024-01-23T09-26-46.662482.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7706672409381974,\n \"acc_stderr\": 0.027896926086644222,\n \"acc_norm\": 0.7738601958843111,\n \"acc_norm_stderr\": 0.028438404294113005,\n \"mc1\": 0.4883720930232558,\n \"mc1_stderr\": 0.017498767175740088,\n \"mc2\": 0.6624336903360023,\n \"mc2_stderr\": 0.0145357390643212\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6749146757679181,\n \"acc_stderr\": 0.013688147309729124,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266129\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6546504680342561,\n \"acc_stderr\": 0.004745103543901293,\n \"acc_norm\": 0.8510256920932086,\n \"acc_norm_stderr\": 0.0035533545281323554\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.038201699145179055,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.038201699145179055\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02461829819586651,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02461829819586651\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.02765610492929436,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.02765610492929436\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 
0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.03242414757483098,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.03242414757483098\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7957446808510639,\n \"acc_stderr\": 0.026355158413349414,\n \"acc_norm\": 0.7957446808510639,\n \"acc_norm_stderr\": 0.026355158413349414\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6140350877192983,\n \"acc_stderr\": 0.04579639422070434,\n \"acc_norm\": 0.6140350877192983,\n \"acc_norm_stderr\": 0.04579639422070434\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7586206896551724,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.7586206896551724,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7275132275132276,\n \"acc_stderr\": 0.022930973071633363,\n \"acc_norm\": 0.7275132275132276,\n \"acc_norm_stderr\": 0.022930973071633363\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9032258064516129,\n \"acc_stderr\": 0.016818943416345197,\n \"acc_norm\": 0.9032258064516129,\n \"acc_norm_stderr\": 0.016818943416345197\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.625615763546798,\n \"acc_stderr\": 0.03405155380561952,\n \"acc_norm\": 0.625615763546798,\n \"acc_norm_stderr\": 0.03405155380561952\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8666666666666667,\n \"acc_stderr\": 0.026544435312706463,\n \"acc_norm\": 0.8666666666666667,\n \"acc_norm_stderr\": 0.026544435312706463\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.018263105420199505,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.018263105420199505\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9689119170984456,\n \"acc_stderr\": 0.012525310625527033,\n \"acc_norm\": 0.9689119170984456,\n \"acc_norm_stderr\": 0.012525310625527033\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8205128205128205,\n \"acc_stderr\": 
0.0194573907876818,\n \"acc_norm\": 0.8205128205128205,\n \"acc_norm_stderr\": 0.0194573907876818\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.44074074074074077,\n \"acc_stderr\": 0.030270671157284067,\n \"acc_norm\": 0.44074074074074077,\n \"acc_norm_stderr\": 0.030270671157284067\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.023005459446673947,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.023005459446673947\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5099337748344371,\n \"acc_stderr\": 0.04081677107248437,\n \"acc_norm\": 0.5099337748344371,\n \"acc_norm_stderr\": 0.04081677107248437\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9174311926605505,\n \"acc_stderr\": 0.011800361363016576,\n \"acc_norm\": 0.9174311926605505,\n \"acc_norm_stderr\": 0.011800361363016576\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6712962962962963,\n \"acc_stderr\": 0.032036140846700596,\n \"acc_norm\": 0.6712962962962963,\n \"acc_norm_stderr\": 0.032036140846700596\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658935,\n \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658935\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065522,\n \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065522\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7937219730941704,\n \"acc_stderr\": 0.02715715047956382,\n \"acc_norm\": 0.7937219730941704,\n \"acc_norm_stderr\": 0.02715715047956382\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.9083969465648855,\n \"acc_stderr\": 0.025300035578642962,\n \"acc_norm\": 0.9083969465648855,\n \"acc_norm_stderr\": 0.025300035578642962\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.9008264462809917,\n \"acc_stderr\": 0.027285246312758957,\n \"acc_norm\": 0.9008264462809917,\n \"acc_norm_stderr\": 0.027285246312758957\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8796296296296297,\n \"acc_stderr\": 0.031457038543062504,\n \"acc_norm\": 0.8796296296296297,\n \"acc_norm_stderr\": 0.031457038543062504\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8773006134969326,\n \"acc_stderr\": 0.025777328426978927,\n \"acc_norm\": 0.8773006134969326,\n \"acc_norm_stderr\": 0.025777328426978927\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6339285714285714,\n \"acc_stderr\": 0.04572372358737431,\n \"acc_norm\": 0.6339285714285714,\n \"acc_norm_stderr\": 0.04572372358737431\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.912621359223301,\n \"acc_stderr\": 0.027960689125970654,\n \"acc_norm\": 0.912621359223301,\n \"acc_norm_stderr\": 0.027960689125970654\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.015006312806446912,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.015006312806446912\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776348,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776348\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9080459770114943,\n \"acc_stderr\": 0.010333225570778518,\n \"acc_norm\": 0.9080459770114943,\n 
\"acc_norm_stderr\": 0.010333225570778518\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8294797687861272,\n \"acc_stderr\": 0.020247961569303728,\n \"acc_norm\": 0.8294797687861272,\n \"acc_norm_stderr\": 0.020247961569303728\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8111731843575419,\n \"acc_stderr\": 0.013089403869745457,\n \"acc_norm\": 0.8111731843575419,\n \"acc_norm_stderr\": 0.013089403869745457\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8594771241830066,\n \"acc_stderr\": 0.019899435463539946,\n \"acc_norm\": 0.8594771241830066,\n \"acc_norm_stderr\": 0.019899435463539946\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8167202572347267,\n \"acc_stderr\": 0.02197419884826582,\n \"acc_norm\": 0.8167202572347267,\n \"acc_norm_stderr\": 0.02197419884826582\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8765432098765432,\n \"acc_stderr\": 0.01830386880689179,\n \"acc_norm\": 0.8765432098765432,\n \"acc_norm_stderr\": 0.01830386880689179\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6382978723404256,\n \"acc_stderr\": 0.028663820147199485,\n \"acc_norm\": 0.6382978723404256,\n \"acc_norm_stderr\": 0.028663820147199485\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.6023468057366362,\n \"acc_stderr\": 0.012499840347460642,\n \"acc_norm\": 0.6023468057366362,\n \"acc_norm_stderr\": 0.012499840347460642\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8272058823529411,\n \"acc_stderr\": 0.022966067585581774,\n \"acc_norm\": 0.8272058823529411,\n \"acc_norm_stderr\": 0.022966067585581774\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8251633986928104,\n \"acc_stderr\": 0.015366167064780641,\n \"acc_norm\": 0.8251633986928104,\n \"acc_norm_stderr\": 0.015366167064780641\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.02292300409473685,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.02292300409473685\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.900497512437811,\n \"acc_stderr\": 0.021166216304659393,\n \"acc_norm\": 0.900497512437811,\n \"acc_norm_stderr\": 0.021166216304659393\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.02464806896136616,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.02464806896136616\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4883720930232558,\n \"mc1_stderr\": 0.017498767175740088,\n \"mc2\": 0.6624336903360023,\n \"mc2_stderr\": 0.0145357390643212\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8476716653512234,\n \"acc_stderr\": 0.010099208246065588\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7391963608794542,\n \"acc_stderr\": 0.01209425241733274\n }\n}\n```", "repo_url": "https://huggingface.co/cloudyu/Yi-34Bx2-MoE-60B-DPO", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|arc:challenge|25_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|gsm8k|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hellaswag|10_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-26-46.662482.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-26-46.662482.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-26-46.662482.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T09-26-46.662482.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-26-46.662482.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-26-46.662482.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["**/details_harness|winogrande|5_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T09-26-46.662482.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T09_26_46.662482", "path": ["results_2024-01-23T09-26-46.662482.parquet"]}, {"split": "latest", "path": 
["results_2024-01-23T09-26-46.662482.parquet"]}]}]}
2024-01-23T09:29:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cloudyu/Yi-34Bx2-MoE-60B-DPO Dataset automatically created during the evaluation run of model cloudyu/Yi-34Bx2-MoE-60B-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T09:26:46.662482 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
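The card text above says "To load the details from a run, you can for instance do the following:" but the Python snippet itself was stripped when the text was flattened. A minimal sketch of what that call presumably looks like — the repository id `open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO` and the `harness_winogrande_5` config are assumptions inferred from the naming pattern used by the other cards in this dump, not quoted from this record:

```python
# Hedged sketch: the repo id and config name below are inferred from the usual
# open-llm-leaderboard naming pattern, not taken verbatim from this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_cloudyu__Yi-34Bx2-MoE-60B-DPO",
    "harness_winogrande_5",  # any per-task config listed in the metadata works here
    split="train",           # "train" always points at the latest run's results
)
print(data)
```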
[ "# Dataset Card for Evaluation run of cloudyu/Yi-34Bx2-MoE-60B-DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Yi-34Bx2-MoE-60B-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T09:26:46.662482(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cloudyu/Yi-34Bx2-MoE-60B-DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Yi-34Bx2-MoE-60B-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T09:26:46.662482(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
9679674d96672b51c0ccc8195cb06b7919256193
# Dataset Card for Evaluation run of cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO](https://huggingface.co/cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T09:56:27.618926](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO/blob/main/results_2024-01-23T09-56-27.618926.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7097116323834379, "acc_stderr": 0.030292548697761927, "acc_norm": 0.713088299746748, "acc_norm_stderr": 0.030879828476358652, "mc1": 0.5397796817625459, "mc1_stderr": 0.01744801722396087, "mc2": 0.6917984166538759, "mc2_stderr": 0.015015749128241987 }, "harness|arc:challenge|25": { "acc": 0.6783276450511946, "acc_stderr": 0.013650488084494162, "acc_norm": 0.6979522184300341, "acc_norm_stderr": 0.013417519144716412 }, "harness|hellaswag|10": { "acc": 0.690300736904999, "acc_stderr": 0.004614246282055376, "acc_norm": 0.8783110934076878, "acc_norm_stderr": 0.0032625801905118595 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6814814814814815, "acc_stderr": 0.04024778401977108, "acc_norm": 0.6814814814814815, "acc_norm_stderr": 0.04024778401977108 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03317672787533157, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03317672787533157 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7735849056603774, "acc_stderr": 0.02575755989310673, "acc_norm": 0.7735849056603774, "acc_norm_stderr": 0.02575755989310673 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8125, "acc_stderr": 0.032639560491693344, "acc_norm": 0.8125, "acc_norm_stderr": 0.032639560491693344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7630057803468208, "acc_stderr": 0.032424147574830975, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.032424147574830975 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.03942772444036624, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036624 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6638297872340425, "acc_stderr": 0.030881618520676942, "acc_norm": 0.6638297872340425, "acc_norm_stderr": 0.030881618520676942 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5789473684210527, "acc_stderr": 0.04644602091222317, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.04644602091222317 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6482758620689655, "acc_stderr": 0.0397923663749741, "acc_norm": 0.6482758620689655, "acc_norm_stderr": 0.0397923663749741 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130733, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130733 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8580645161290322, "acc_stderr": 0.019853003676559754, "acc_norm": 0.8580645161290322, "acc_norm_stderr": 0.019853003676559754 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6108374384236454, "acc_stderr": 0.03430462416103872, "acc_norm": 0.6108374384236454, "acc_norm_stderr": 0.03430462416103872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816508, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047709, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047709 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9585492227979274, "acc_stderr": 0.01438543285747646, "acc_norm": 0.9585492227979274, "acc_norm_stderr": 0.01438543285747646 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6923076923076923, "acc_stderr": 0.023400928918310488, "acc_norm": 0.6923076923076923, "acc_norm_stderr": 0.023400928918310488 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3962962962962963, "acc_stderr": 0.029822619458534004, "acc_norm": 0.3962962962962963, "acc_norm_stderr": 0.029822619458534004 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8025210084033614, "acc_stderr": 0.02585916412205145, "acc_norm": 0.8025210084033614, "acc_norm_stderr": 0.02585916412205145 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.45695364238410596, "acc_stderr": 
0.04067325174247444, "acc_norm": 0.45695364238410596, "acc_norm_stderr": 0.04067325174247444 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8862385321100917, "acc_stderr": 0.013613614800232808, "acc_norm": 0.8862385321100917, "acc_norm_stderr": 0.013613614800232808 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5879629629629629, "acc_stderr": 0.03356787758160831, "acc_norm": 0.5879629629629629, "acc_norm_stderr": 0.03356787758160831 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.02378429752091885, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.02378429752091885 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.869198312236287, "acc_stderr": 0.02194876605947076, "acc_norm": 0.869198312236287, "acc_norm_stderr": 0.02194876605947076 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.757847533632287, "acc_stderr": 0.028751392398694755, "acc_norm": 0.757847533632287, "acc_norm_stderr": 0.028751392398694755 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462469, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.803680981595092, "acc_stderr": 0.031207970394709225, "acc_norm": 0.803680981595092, "acc_norm_stderr": 0.031207970394709225 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04697113923010213, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04697113923010213 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.035865947385739734, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.035865947385739734 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9230769230769231, "acc_stderr": 0.017456987872436193, "acc_norm": 0.9230769230769231, "acc_norm_stderr": 0.017456987872436193 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8825031928480205, "acc_stderr": 0.011515102251977221, "acc_norm": 0.8825031928480205, "acc_norm_stderr": 0.011515102251977221 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7861271676300579, "acc_stderr": 0.02207570925175718, "acc_norm": 0.7861271676300579, "acc_norm_stderr": 0.02207570925175718 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45363128491620114, "acc_stderr": 0.016650437588269073, "acc_norm": 0.45363128491620114, "acc_norm_stderr": 0.016650437588269073 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8235294117647058, "acc_stderr": 0.021828596053108395, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.021828596053108395 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7845659163987139, "acc_stderr": 0.023350225475471442, "acc_norm": 0.7845659163987139, "acc_norm_stderr": 0.023350225475471442 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157358, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157358 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5567375886524822, "acc_stderr": 0.029634838473766006, "acc_norm": 0.5567375886524822, "acc_norm_stderr": 0.029634838473766006 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5469361147327249, "acc_stderr": 0.012713845972358995, "acc_norm": 0.5469361147327249, "acc_norm_stderr": 0.012713845972358995 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7867647058823529, "acc_stderr": 0.024880971512294257, "acc_norm": 0.7867647058823529, "acc_norm_stderr": 0.024880971512294257 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7647058823529411, "acc_stderr": 0.017160587235046345, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.017160587235046345 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644286, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644286 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7877551020408163, "acc_stderr": 0.026176967197866764, "acc_norm": 0.7877551020408163, "acc_norm_stderr": 0.026176967197866764 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900798, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900798 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835816, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835816 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8888888888888888, "acc_stderr": 0.024103384202072864, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.024103384202072864 }, "harness|truthfulqa:mc|0": { "mc1": 0.5397796817625459, "mc1_stderr": 0.01744801722396087, "mc2": 0.6917984166538759, "mc2_stderr": 0.015015749128241987 }, "harness|winogrande|5": { "acc": 0.813733228097869, "acc_stderr": 0.010941877955676207 }, "harness|gsm8k|5": { "acc": 0.6141015921152388, "acc_stderr": 0.013409077471319175 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
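The snippet in the card above loads a single per-task config; the summary also mentions an aggregated "results" configuration with a "latest" split. A complementary sketch for reading those aggregated metrics — the config and split names come from the card and its metadata, while the exact layout of the returned row is an assumption and may differ in practice:

```python
# Hedged sketch: pulls the aggregated "results" config described in the card.
# The config/split names are taken from the card; the row layout is assumed.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO",
    "results",
    split="latest",
)
print(results[0])  # one row per evaluation run, holding the aggregated metrics
```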
open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO
[ "region:us" ]
2024-01-23T09:58:41+00:00
{"pretty_name": "Evaluation run of cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO](https://huggingface.co/cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T09:56:27.618926](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Mixtral-8x7B-Instruct-v0.1-DPO/blob/main/results_2024-01-23T09-56-27.618926.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7097116323834379,\n \"acc_stderr\": 0.030292548697761927,\n \"acc_norm\": 0.713088299746748,\n \"acc_norm_stderr\": 0.030879828476358652,\n \"mc1\": 0.5397796817625459,\n \"mc1_stderr\": 0.01744801722396087,\n \"mc2\": 0.6917984166538759,\n \"mc2_stderr\": 0.015015749128241987\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6783276450511946,\n \"acc_stderr\": 0.013650488084494162,\n \"acc_norm\": 0.6979522184300341,\n \"acc_norm_stderr\": 0.013417519144716412\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.690300736904999,\n \"acc_stderr\": 0.004614246282055376,\n \"acc_norm\": 0.8783110934076878,\n \"acc_norm_stderr\": 0.0032625801905118595\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6814814814814815,\n \"acc_stderr\": 0.04024778401977108,\n \"acc_norm\": 0.6814814814814815,\n \"acc_norm_stderr\": 0.04024778401977108\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03317672787533157,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03317672787533157\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7735849056603774,\n \"acc_stderr\": 0.02575755989310673,\n \"acc_norm\": 0.7735849056603774,\n \"acc_norm_stderr\": 0.02575755989310673\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.032639560491693344,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.032639560491693344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.032424147574830975,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.032424147574830975\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036624,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036624\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6638297872340425,\n \"acc_stderr\": 0.030881618520676942,\n \"acc_norm\": 0.6638297872340425,\n \"acc_norm_stderr\": 0.030881618520676942\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.04644602091222317,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.04644602091222317\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6482758620689655,\n \"acc_stderr\": 0.0397923663749741,\n \"acc_norm\": 0.6482758620689655,\n \"acc_norm_stderr\": 0.0397923663749741\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130733,\n \"acc_norm\": 0.47883597883597884,\n \"acc_norm_stderr\": 0.025728230952130733\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8580645161290322,\n \"acc_stderr\": 0.019853003676559754,\n \"acc_norm\": 0.8580645161290322,\n \"acc_norm_stderr\": 0.019853003676559754\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6108374384236454,\n \"acc_stderr\": 0.03430462416103872,\n \"acc_norm\": 0.6108374384236454,\n \"acc_norm_stderr\": 0.03430462416103872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047709,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047709\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9585492227979274,\n \"acc_stderr\": 0.01438543285747646,\n \"acc_norm\": 0.9585492227979274,\n \"acc_norm_stderr\": 0.01438543285747646\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6923076923076923,\n \"acc_stderr\": 0.023400928918310488,\n \"acc_norm\": 0.6923076923076923,\n \"acc_norm_stderr\": 0.023400928918310488\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3962962962962963,\n \"acc_stderr\": 0.029822619458534004,\n \"acc_norm\": 0.3962962962962963,\n \"acc_norm_stderr\": 0.029822619458534004\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8025210084033614,\n \"acc_stderr\": 0.02585916412205145,\n \"acc_norm\": 0.8025210084033614,\n \"acc_norm_stderr\": 0.02585916412205145\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.45695364238410596,\n \"acc_stderr\": 0.04067325174247444,\n \"acc_norm\": 0.45695364238410596,\n \"acc_norm_stderr\": 0.04067325174247444\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8862385321100917,\n \"acc_stderr\": 0.013613614800232808,\n \"acc_norm\": 0.8862385321100917,\n \"acc_norm_stderr\": 0.013613614800232808\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5879629629629629,\n \"acc_stderr\": 0.03356787758160831,\n \"acc_norm\": 0.5879629629629629,\n \"acc_norm_stderr\": 0.03356787758160831\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.02378429752091885,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.02378429752091885\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.869198312236287,\n \"acc_stderr\": 0.02194876605947076,\n \"acc_norm\": 0.869198312236287,\n \"acc_norm_stderr\": 0.02194876605947076\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.757847533632287,\n \"acc_stderr\": 0.028751392398694755,\n \"acc_norm\": 0.757847533632287,\n \"acc_norm_stderr\": 0.028751392398694755\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462469,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.803680981595092,\n \"acc_stderr\": 0.031207970394709225,\n \"acc_norm\": 0.803680981595092,\n \"acc_norm_stderr\": 0.031207970394709225\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.035865947385739734,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.035865947385739734\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9230769230769231,\n \"acc_stderr\": 0.017456987872436193,\n \"acc_norm\": 0.9230769230769231,\n \"acc_norm_stderr\": 0.017456987872436193\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8825031928480205,\n \"acc_stderr\": 0.011515102251977221,\n \"acc_norm\": 0.8825031928480205,\n \"acc_norm_stderr\": 0.011515102251977221\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7861271676300579,\n \"acc_stderr\": 0.02207570925175718,\n \"acc_norm\": 0.7861271676300579,\n \"acc_norm_stderr\": 0.02207570925175718\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45363128491620114,\n \"acc_stderr\": 0.016650437588269073,\n \"acc_norm\": 0.45363128491620114,\n \"acc_norm_stderr\": 0.016650437588269073\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.021828596053108395,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.021828596053108395\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7845659163987139,\n \"acc_stderr\": 0.023350225475471442,\n \"acc_norm\": 0.7845659163987139,\n \"acc_norm_stderr\": 0.023350225475471442\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157358,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157358\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5567375886524822,\n \"acc_stderr\": 0.029634838473766006,\n \"acc_norm\": 0.5567375886524822,\n \"acc_norm_stderr\": 0.029634838473766006\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5469361147327249,\n \"acc_stderr\": 0.012713845972358995,\n \"acc_norm\": 0.5469361147327249,\n \"acc_norm_stderr\": 0.012713845972358995\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7867647058823529,\n \"acc_stderr\": 0.024880971512294257,\n \"acc_norm\": 0.7867647058823529,\n \"acc_norm_stderr\": 0.024880971512294257\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.017160587235046345,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.017160587235046345\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644286,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644286\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7877551020408163,\n \"acc_stderr\": 0.026176967197866764,\n \"acc_norm\": 0.7877551020408163,\n \"acc_norm_stderr\": 0.026176967197866764\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.024112678240900798,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.024112678240900798\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.024103384202072864,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.024103384202072864\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5397796817625459,\n \"mc1_stderr\": 0.01744801722396087,\n \"mc2\": 0.6917984166538759,\n \"mc2_stderr\": 0.015015749128241987\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.010941877955676207\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6141015921152388,\n \"acc_stderr\": 0.013409077471319175\n }\n}\n```", 
"repo_url": "https://huggingface.co/cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|arc:challenge|25_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|gsm8k|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hellaswag|10_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-56-27.618926.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-56-27.618926.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-56-27.618926.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T09-56-27.618926.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-56-27.618926.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T09_56_27.618926", "path": ["**/details_harness|winogrande|5_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T09-56-27.618926.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T09_56_27.618926", "path": ["results_2024-01-23T09-56-27.618926.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T09-56-27.618926.parquet"]}]}]}
2024-01-23T09:59:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO Dataset automatically created during the evaluation run of model cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T09:56:27.618926 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T09:56:27.618926(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Mixtral-8x7B-Instruct-v0.1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T09:56:27.618926(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e975745732ede854f7990056bf143f4df456ea4e
This dataset includes labeled sprite animations that look the best imho. More info https://github.com/PawKanarek/spraix
pawkanarek/spraix_1024_best_96
[ "size_categories:n<1K", "license:gpl-3.0", "art", "region:us" ]
2024-01-23T10:09:37+00:00
{"license": "gpl-3.0", "size_categories": ["n<1K"], "pretty_name": "96 images to train sprite animations", "tags": ["art"]}
2024-02-06T13:04:18+00:00
[]
[]
TAGS #size_categories-n<1K #license-gpl-3.0 #art #region-us
This dataset includes labeled sprite animations that look the best imho. More info URL
[]
[ "TAGS\n#size_categories-n<1K #license-gpl-3.0 #art #region-us \n" ]
a3c4684f091c330b5867adf52aa7c03390643c7b
# Dataset Card for Evaluation run of mathurinache/Odysseas-11B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mathurinache/Odysseas-11B](https://huggingface.co/mathurinache/Odysseas-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mathurinache__Odysseas-11B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T13:39:56.584949](https://huggingface.co/datasets/open-llm-leaderboard/details_mathurinache__Odysseas-11B/blob/main/results_2024-01-23T13-39-56.584949.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23196194129343728, "acc_stderr": 0.029934654752561563, "acc_norm": 0.2314240573187148, "acc_norm_stderr": 0.03071122006512167, "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.012240491536132861, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.012240491536132861 }, "harness|hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, 
"acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 
0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, 
"acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.4956590370955012, "acc_stderr": 0.014051956064076911 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_mathurinache__Odysseas-11B
[ "region:us" ]
2024-01-23T10:11:03+00:00
{"pretty_name": "Evaluation run of mathurinache/Odysseas-11B", "dataset_summary": "Dataset automatically created during the evaluation run of model [mathurinache/Odysseas-11B](https://huggingface.co/mathurinache/Odysseas-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mathurinache__Odysseas-11B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T13:39:56.584949](https://huggingface.co/datasets/open-llm-leaderboard/details_mathurinache__Odysseas-11B/blob/main/results_2024-01-23T13-39-56.584949.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23196194129343728,\n \"acc_stderr\": 0.029934654752561563,\n \"acc_norm\": 0.2314240573187148,\n \"acc_norm_stderr\": 0.03071122006512167,\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22696245733788395,\n \"acc_stderr\": 0.012240491536132861,\n \"acc_norm\": 0.22696245733788395,\n \"acc_norm_stderr\": 0.012240491536132861\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2504481179047998,\n \"acc_stderr\": 0.004323856300539177,\n \"acc_norm\": 0.2504481179047998,\n \"acc_norm_stderr\": 0.004323856300539177\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 
0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n 
\"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 
0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4956590370955012,\n \"acc_stderr\": 0.014051956064076911\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/mathurinache/Odysseas-11B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", 
"configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|arc:challenge|25_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|arc:challenge|25_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|gsm8k|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|gsm8k|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hellaswag|10_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hellaswag|10_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-08-45.752755.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T10-08-45.752755.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-39-56.584949.parquet", 
"**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-39-56.584949.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-39-56.584949.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T13-39-56.584949.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": 
"2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["**/details_harness|winogrande|5_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": ["**/details_harness|winogrande|5_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T13-39-56.584949.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T10_08_45.752755", "path": ["results_2024-01-23T10-08-45.752755.parquet"]}, {"split": "2024_01_23T13_39_56.584949", "path": 
["results_2024-01-23T13-39-56.584949.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T13-39-56.584949.parquet"]}]}]}
2024-01-23T13:42:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mathurinache/Odysseas-11B Dataset automatically created during the evaluation run of model mathurinache/Odysseas-11B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T13:39:56.584949 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
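The loading snippet referenced just above ("you can for instance do the following:") was stripped when this card text was flattened. A minimal sketch follows, assuming the details repository uses the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern seen in the other cards, and reusing the config and split names listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repository id, following the details_<org>__<model> pattern used by
# other Open LLM Leaderboard evaluation datasets in this dump.
repo_id = "open-llm-leaderboard/details_mathurinache__Odysseas-11B"

# Load one of the 63 task configurations; "latest" points to the most recent run,
# while timestamped splits (e.g. "2024_01_23T13_39_56.584949") select a specific run.
winogrande_details = load_dataset(repo_id, "harness_winogrande_5", split="latest")

# The aggregated metrics for each run live in the "results" configuration.
results = load_dataset(repo_id, "results", split="latest")
```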
[ "# Dataset Card for Evaluation run of mathurinache/Odysseas-11B\n\n\n\nDataset automatically created during the evaluation run of model mathurinache/Odysseas-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T13:39:56.584949(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mathurinache/Odysseas-11B\n\n\n\nDataset automatically created during the evaluation run of model mathurinache/Odysseas-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T13:39:56.584949(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3699432633560d455e8ea77f3e2bb758ed96b181
# Dataset Card for Evaluation run of aloobun/slerp_bun_mistral_7b_v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [aloobun/slerp_bun_mistral_7b_v2](https://huggingface.co/aloobun/slerp_bun_mistral_7b_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_aloobun__slerp_bun_mistral_7b_v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T10:20:17.591541](https://huggingface.co/datasets/open-llm-leaderboard/details_aloobun__slerp_bun_mistral_7b_v2/blob/main/results_2024-01-23T10-20-17.591541.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.647773501326585, "acc_stderr": 0.03203032727442697, "acc_norm": 0.6497939835395516, "acc_norm_stderr": 0.03267408145525671, "mc1": 0.3268053855569155, "mc1_stderr": 0.016419874731135032, "mc2": 0.48099431406829407, "mc2_stderr": 0.014890298910191732 }, "harness|arc:challenge|25": { "acc": 0.6143344709897611, "acc_stderr": 0.014224250973257174, "acc_norm": 0.6561433447098977, "acc_norm_stderr": 0.01388064457015621 }, "harness|hellaswag|10": { "acc": 0.6600278828918542, "acc_stderr": 0.004727312448892837, "acc_norm": 0.8528181637124079, "acc_norm_stderr": 0.003535630289091451 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7283018867924528, "acc_stderr": 0.027377706624670713, "acc_norm": 0.7283018867924528, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 
0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082635, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082635 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.04113914981189261, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.04113914981189261 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.02328766512726855, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.02328766512726855 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02962022787479049, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02962022787479049 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121437, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121437 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.02407869658063548, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.02407869658063548 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.029719142876342856, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.029719142876342856 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374301, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374301 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4861111111111111, "acc_stderr": 0.03408655867977748, "acc_norm": 0.4861111111111111, "acc_norm_stderr": 0.03408655867977748 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462472, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462472 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5267857142857143, "acc_stderr": 0.047389751192741546, "acc_norm": 0.5267857142857143, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165612, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165612 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8326947637292464, "acc_stderr": 0.013347327202920332, "acc_norm": 0.8326947637292464, "acc_norm_stderr": 0.013347327202920332 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.02335736578587403, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.02335736578587403 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.29832402234636873, "acc_stderr": 0.015301840045129278, "acc_norm": 0.29832402234636873, "acc_norm_stderr": 0.015301840045129278 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.02463004897982478, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7685185185185185, "acc_stderr": 0.02346842983245115, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.02346842983245115 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.02975238965742705, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.02975238965742705 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6654411764705882, "acc_stderr": 0.028661996202335303, "acc_norm": 0.6654411764705882, "acc_norm_stderr": 0.028661996202335303 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083387, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083387 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.02783302387139968, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.02783302387139968 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.3268053855569155, "mc1_stderr": 0.016419874731135032, "mc2": 0.48099431406829407, "mc2_stderr": 0.014890298910191732 }, "harness|winogrande|5": { "acc": 0.8082083662194159, "acc_stderr": 0.011065209664659527 }, "harness|gsm8k|5": { "acc": 0.6027293404094011, "acc_stderr": 0.013478659652337794 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_aloobun__slerp_bun_mistral_7b_v2
[ "region:us" ]
2024-01-23T10:18:52+00:00
{"pretty_name": "Evaluation run of aloobun/slerp_bun_mistral_7b_v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [aloobun/slerp_bun_mistral_7b_v2](https://huggingface.co/aloobun/slerp_bun_mistral_7b_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_aloobun__slerp_bun_mistral_7b_v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T10:20:17.591541](https://huggingface.co/datasets/open-llm-leaderboard/details_aloobun__slerp_bun_mistral_7b_v2/blob/main/results_2024-01-23T10-20-17.591541.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.647773501326585,\n \"acc_stderr\": 0.03203032727442697,\n \"acc_norm\": 0.6497939835395516,\n \"acc_norm_stderr\": 0.03267408145525671,\n \"mc1\": 0.3268053855569155,\n \"mc1_stderr\": 0.016419874731135032,\n \"mc2\": 0.48099431406829407,\n \"mc2_stderr\": 0.014890298910191732\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6143344709897611,\n \"acc_stderr\": 0.014224250973257174,\n \"acc_norm\": 0.6561433447098977,\n \"acc_norm_stderr\": 0.01388064457015621\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6600278828918542,\n \"acc_stderr\": 0.004727312448892837,\n \"acc_norm\": 0.8528181637124079,\n \"acc_norm_stderr\": 0.003535630289091451\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7283018867924528,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.7283018867924528,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082635,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082635\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.04113914981189261,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.04113914981189261\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.02328766512726855,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.02328766512726855\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02962022787479049,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02962022787479049\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121437,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121437\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.02407869658063548,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.02407869658063548\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342856,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342856\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374301,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374301\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462472,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462472\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5267857142857143,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.5267857142857143,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165612,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165612\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8326947637292464,\n \"acc_stderr\": 0.013347327202920332,\n \"acc_norm\": 0.8326947637292464,\n \"acc_norm_stderr\": 0.013347327202920332\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.02335736578587403,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.02335736578587403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.29832402234636873,\n \"acc_stderr\": 0.015301840045129278,\n \"acc_norm\": 0.29832402234636873,\n \"acc_norm_stderr\": 0.015301840045129278\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.02346842983245115,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.02346842983245115\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.02975238965742705,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.02975238965742705\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6654411764705882,\n \"acc_stderr\": 0.028661996202335303,\n \"acc_norm\": 0.6654411764705882,\n \"acc_norm_stderr\": 0.028661996202335303\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083387,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083387\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.02783302387139968,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.02783302387139968\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3268053855569155,\n \"mc1_stderr\": 0.016419874731135032,\n \"mc2\": 0.48099431406829407,\n \"mc2_stderr\": 0.014890298910191732\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8082083662194159,\n \"acc_stderr\": 0.011065209664659527\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6027293404094011,\n \"acc_stderr\": 0.013478659652337794\n 
}\n}\n```", "repo_url": "https://huggingface.co/aloobun/slerp_bun_mistral_7b_v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|arc:challenge|25_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|arc:challenge|25_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|gsm8k|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|gsm8k|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hellaswag|10_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hellaswag|10_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-16-34.252093.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T10-16-34.252093.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-20-17.591541.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-20-17.591541.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-20-17.591541.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T10-20-17.591541.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-16-34.252093.parquet"]}, 
{"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["**/details_harness|winogrande|5_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": ["**/details_harness|winogrande|5_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T10-20-17.591541.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T10_16_34.252093", "path": ["results_2024-01-23T10-16-34.252093.parquet"]}, {"split": "2024_01_23T10_20_17.591541", "path": 
["results_2024-01-23T10-20-17.591541.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T10-20-17.591541.parquet"]}]}]}
2024-01-23T10:22:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of aloobun/slerp_bun_mistral_7b_v2 Dataset automatically created during the evaluation run of model aloobun/slerp_bun_mistral_7b_v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T10:20:17.591541 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of aloobun/slerp_bun_mistral_7b_v2\n\n\n\nDataset automatically created during the evaluation run of model aloobun/slerp_bun_mistral_7b_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T10:20:17.591541(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of aloobun/slerp_bun_mistral_7b_v2\n\n\n\nDataset automatically created during the evaluation run of model aloobun/slerp_bun_mistral_7b_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T10:20:17.591541(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
81ec1ea7796524b552f3463890453c07c1204589
# Dataset Card for "fashion_image_caption-100-v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
pheid/fashion_image_caption-100-v2
[ "region:us" ]
2024-01-23T11:10:55+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 22820471.0, "num_examples": 100}], "download_size": 22820373, "dataset_size": 22820471.0}}
2024-01-23T11:10:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for "fashion_image_caption-100-v2" More Information needed
[ "# Dataset Card for \"fashion_image_caption-100-v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"fashion_image_caption-100-v2\"\n\nMore Information needed" ]
6c6824acd43dadd76f8d63f5d087960a3d6534fe
eval file
JIM-Zhangxw/eval_self_rag
[ "region:us" ]
2024-01-23T11:17:56+00:00
{}
2024-01-23T11:30:56+00:00
[]
[]
TAGS #region-us
eval file
[]
[ "TAGS\n#region-us \n" ]
bdf1824c79c0b6d0f898e04c4474cc54122ba8e5
# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8-1.1b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cognitivecomputations/TinyDolphin-2.8-1.1b](https://huggingface.co/cognitivecomputations/TinyDolphin-2.8-1.1b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T11:30:41.082288](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b/blob/main/results_2024-01-23T11-30-41.082288.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2622018497674234, "acc_stderr": 0.030893654783692482, "acc_norm": 0.26309169403239707, "acc_norm_stderr": 0.03165287942154967, "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023509, "mc2": 0.36506322642682476, "mc2_stderr": 0.014134362597043171 }, "harness|arc:challenge|25": { "acc": 0.32593856655290104, "acc_stderr": 0.01369743246669324, "acc_norm": 0.3430034129692833, "acc_norm_stderr": 0.013872423223718174 }, "harness|hellaswag|10": { "acc": 0.46126269667396935, "acc_stderr": 0.004974783753309698, "acc_norm": 0.5944035052778331, "acc_norm_stderr": 0.004900036261309041 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.039992628766177214, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.039992628766177214 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.25, "acc_stderr": 0.03523807393012047, "acc_norm": 0.25, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.20754716981132076, "acc_stderr": 0.024959918028911274, "acc_norm": 0.20754716981132076, "acc_norm_stderr": 0.024959918028911274 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2152777777777778, "acc_stderr": 0.03437079344106132, "acc_norm": 0.2152777777777778, "acc_norm_stderr": 0.03437079344106132 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.03861229196653697, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653697 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, 
"acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23699421965317918, "acc_stderr": 0.03242414757483099, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.03242414757483099 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2170212765957447, "acc_stderr": 0.026947483121496238, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496238 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.19298245614035087, "acc_stderr": 0.037124548537213684, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.037124548537213684 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.02271746789770861, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.02271746789770861 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.033954900208561116, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.033954900208561116 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25806451612903225, "acc_stderr": 0.02489246917246284, "acc_norm": 0.25806451612903225, "acc_norm_stderr": 0.02489246917246284 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.0319474007226554, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.0319474007226554 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.25252525252525254, "acc_stderr": 0.030954055470365904, "acc_norm": 0.25252525252525254, "acc_norm_stderr": 0.030954055470365904 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178256, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178256 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.22564102564102564, "acc_stderr": 0.021193632525148543, "acc_norm": 0.22564102564102564, "acc_norm_stderr": 0.021193632525148543 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02696242432507383, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02696242432507383 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.02702543349888236, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.02702543349888236 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2185430463576159, "acc_stderr": 0.03374235550425694, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.03374235550425694 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22201834862385322, "acc_stderr": 0.01781884956479663, "acc_norm": 0.22201834862385322, "acc_norm_stderr": 0.01781884956479663 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502325, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502325 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.029771775228145638, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.029771775228145638 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.26905829596412556, "acc_stderr": 0.029763779406874972, "acc_norm": 0.26905829596412556, "acc_norm_stderr": 0.029763779406874972 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.19083969465648856, "acc_stderr": 0.03446513350752599, "acc_norm": 0.19083969465648856, "acc_norm_stderr": 0.03446513350752599 }, "harness|hendrycksTest-international_law|5": { "acc": 0.36363636363636365, "acc_stderr": 0.04391326286724071, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04391326286724071 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252627, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|hendrycksTest-management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384495, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2564102564102564, "acc_stderr": 0.02860595370200425, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.02860595370200425 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.26436781609195403, "acc_stderr": 0.015769984840690525, "acc_norm": 0.26436781609195403, "acc_norm_stderr": 0.015769984840690525 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2861271676300578, "acc_stderr": 0.02433214677913413, "acc_norm": 0.2861271676300578, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.27124183006535946, "acc_stderr": 0.02545775669666788, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3054662379421222, "acc_stderr": 0.02616058445014049, "acc_norm": 0.3054662379421222, "acc_norm_stderr": 0.02616058445014049 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.28703703703703703, "acc_stderr": 
0.025171041915309684, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.025171041915309684 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.28368794326241137, "acc_stderr": 0.02689170942834396, "acc_norm": 0.28368794326241137, "acc_norm_stderr": 0.02689170942834396 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2737940026075619, "acc_stderr": 0.01138861216797939, "acc_norm": 0.2737940026075619, "acc_norm_stderr": 0.01138861216797939 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.16544117647058823, "acc_stderr": 0.022571771025494767, "acc_norm": 0.16544117647058823, "acc_norm_stderr": 0.022571771025494767 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27941176470588236, "acc_stderr": 0.01815287105153881, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.01815287105153881 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2818181818181818, "acc_stderr": 0.04309118709946459, "acc_norm": 0.2818181818181818, "acc_norm_stderr": 0.04309118709946459 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.1836734693877551, "acc_stderr": 0.024789071332007646, "acc_norm": 0.1836734693877551, "acc_norm_stderr": 0.024789071332007646 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401467, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401467 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-virology|5": { "acc": 0.21084337349397592, "acc_stderr": 0.0317555478662992, "acc_norm": 0.21084337349397592, "acc_norm_stderr": 0.0317555478662992 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.30409356725146197, "acc_stderr": 0.03528211258245231, "acc_norm": 0.30409356725146197, "acc_norm_stderr": 0.03528211258245231 }, "harness|truthfulqa:mc|0": { "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023509, "mc2": 0.36506322642682476, "mc2_stderr": 0.014134362597043171 }, "harness|winogrande|5": { "acc": 0.6069455406471981, "acc_stderr": 0.013727276249108451 }, "harness|gsm8k|5": { "acc": 0.015163002274450341, "acc_stderr": 0.0033660229497263707 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b
[ "region:us" ]
2024-01-23T11:33:03+00:00
{"pretty_name": "Evaluation run of cognitivecomputations/TinyDolphin-2.8-1.1b", "dataset_summary": "Dataset automatically created during the evaluation run of model [cognitivecomputations/TinyDolphin-2.8-1.1b](https://huggingface.co/cognitivecomputations/TinyDolphin-2.8-1.1b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T11:30:41.082288](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b/blob/main/results_2024-01-23T11-30-41.082288.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2622018497674234,\n \"acc_stderr\": 0.030893654783692482,\n \"acc_norm\": 0.26309169403239707,\n \"acc_norm_stderr\": 0.03165287942154967,\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023509,\n \"mc2\": 0.36506322642682476,\n \"mc2_stderr\": 0.014134362597043171\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.32593856655290104,\n \"acc_stderr\": 0.01369743246669324,\n \"acc_norm\": 0.3430034129692833,\n \"acc_norm_stderr\": 0.013872423223718174\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.46126269667396935,\n \"acc_stderr\": 0.004974783753309698,\n \"acc_norm\": 0.5944035052778331,\n \"acc_norm_stderr\": 0.004900036261309041\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621503,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621503\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.039992628766177214,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.039992628766177214\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.20754716981132076,\n \"acc_stderr\": 0.024959918028911274,\n \"acc_norm\": 0.20754716981132076,\n \"acc_norm_stderr\": 0.024959918028911274\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2152777777777778,\n \"acc_stderr\": 0.03437079344106132,\n \"acc_norm\": 0.2152777777777778,\n \"acc_norm_stderr\": 0.03437079344106132\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653697,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653697\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23699421965317918,\n \"acc_stderr\": 0.03242414757483099,\n \"acc_norm\": 0.23699421965317918,\n \"acc_norm_stderr\": 0.03242414757483099\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237655,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2170212765957447,\n \"acc_stderr\": 0.026947483121496238,\n \"acc_norm\": 0.2170212765957447,\n \"acc_norm_stderr\": 0.026947483121496238\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.19298245614035087,\n \"acc_stderr\": 0.037124548537213684,\n \"acc_norm\": 0.19298245614035087,\n \"acc_norm_stderr\": 0.037124548537213684\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.02271746789770861,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.02271746789770861\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 0.033954900208561116,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.033954900208561116\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25806451612903225,\n \"acc_stderr\": 0.02489246917246284,\n \"acc_norm\": 0.25806451612903225,\n \"acc_norm_stderr\": 0.02489246917246284\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.0319474007226554,\n \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.0319474007226554\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.25252525252525254,\n \"acc_stderr\": 0.030954055470365904,\n \"acc_norm\": 0.25252525252525254,\n \"acc_norm_stderr\": 0.030954055470365904\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22797927461139897,\n \"acc_stderr\": 0.030276909945178256,\n \"acc_norm\": 0.22797927461139897,\n 
\"acc_norm_stderr\": 0.030276909945178256\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.22564102564102564,\n \"acc_stderr\": 0.021193632525148543,\n \"acc_norm\": 0.22564102564102564,\n \"acc_norm_stderr\": 0.021193632525148543\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.02696242432507383,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.02696242432507383\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.02702543349888236,\n \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.02702543349888236\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2185430463576159,\n \"acc_stderr\": 0.03374235550425694,\n \"acc_norm\": 0.2185430463576159,\n \"acc_norm_stderr\": 0.03374235550425694\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22201834862385322,\n \"acc_stderr\": 0.01781884956479663,\n \"acc_norm\": 0.22201834862385322,\n \"acc_norm_stderr\": 0.01781884956479663\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.029771775228145638,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.029771775228145638\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.26905829596412556,\n \"acc_stderr\": 0.029763779406874972,\n \"acc_norm\": 0.26905829596412556,\n \"acc_norm_stderr\": 0.029763779406874972\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.19083969465648856,\n \"acc_stderr\": 0.03446513350752599,\n \"acc_norm\": 0.19083969465648856,\n \"acc_norm_stderr\": 0.03446513350752599\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.36363636363636365,\n \"acc_stderr\": 0.04391326286724071,\n \"acc_norm\": 0.36363636363636365,\n \"acc_norm_stderr\": 0.04391326286724071\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3067484662576687,\n \"acc_stderr\": 0.036230899157241474,\n \"acc_norm\": 0.3067484662576687,\n \"acc_norm_stderr\": 0.036230899157241474\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n \"acc_stderr\": 0.041577515398656284,\n \"acc_norm\": 0.25892857142857145,\n \"acc_norm_stderr\": 0.041577515398656284\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2564102564102564,\n \"acc_stderr\": 0.02860595370200425,\n \"acc_norm\": 0.2564102564102564,\n \"acc_norm_stderr\": 0.02860595370200425\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n 
},\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26436781609195403,\n \"acc_stderr\": 0.015769984840690525,\n \"acc_norm\": 0.26436781609195403,\n \"acc_norm_stderr\": 0.015769984840690525\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2861271676300578,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.2861271676300578,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.27124183006535946,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3054662379421222,\n \"acc_stderr\": 0.02616058445014049,\n \"acc_norm\": 0.3054662379421222,\n \"acc_norm_stderr\": 0.02616058445014049\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.28703703703703703,\n \"acc_stderr\": 0.025171041915309684,\n \"acc_norm\": 0.28703703703703703,\n \"acc_norm_stderr\": 0.025171041915309684\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.28368794326241137,\n \"acc_stderr\": 0.02689170942834396,\n \"acc_norm\": 0.28368794326241137,\n \"acc_norm_stderr\": 0.02689170942834396\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2737940026075619,\n \"acc_stderr\": 0.01138861216797939,\n \"acc_norm\": 0.2737940026075619,\n \"acc_norm_stderr\": 0.01138861216797939\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.16544117647058823,\n \"acc_stderr\": 0.022571771025494767,\n \"acc_norm\": 0.16544117647058823,\n \"acc_norm_stderr\": 0.022571771025494767\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.01815287105153881,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.01815287105153881\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2818181818181818,\n \"acc_stderr\": 0.04309118709946459,\n \"acc_norm\": 0.2818181818181818,\n \"acc_norm_stderr\": 0.04309118709946459\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.1836734693877551,\n \"acc_stderr\": 0.024789071332007646,\n \"acc_norm\": 0.1836734693877551,\n \"acc_norm_stderr\": 0.024789071332007646\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401467,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401467\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.21084337349397592,\n \"acc_stderr\": 0.0317555478662992,\n \"acc_norm\": 0.21084337349397592,\n \"acc_norm_stderr\": 0.0317555478662992\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.30409356725146197,\n \"acc_stderr\": 0.03528211258245231,\n \"acc_norm\": 0.30409356725146197,\n \"acc_norm_stderr\": 0.03528211258245231\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023509,\n \"mc2\": 0.36506322642682476,\n \"mc2_stderr\": 0.014134362597043171\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6069455406471981,\n \"acc_stderr\": 0.013727276249108451\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.015163002274450341,\n \"acc_stderr\": 0.0033660229497263707\n }\n}\n```", "repo_url": "https://huggingface.co/cognitivecomputations/TinyDolphin-2.8-1.1b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|arc:challenge|25_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|gsm8k|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hellaswag|10_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T11-30-41.082288.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T11-30-41.082288.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T11-30-41.082288.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T11-30-41.082288.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T11-30-41.082288.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["**/details_harness|winogrande|5_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T11-30-41.082288.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T11_30_41.082288", "path": ["results_2024-01-23T11-30-41.082288.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T11-30-41.082288.parquet"]}]}]}
2024-01-23T11:33:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8-1.1b Dataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8-1.1b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T11:30:41.082288 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
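The loading snippet referred to above is not reproduced in this stripped copy of the card; the sketch below shows the usual pattern. The repository id follows the `details_<organisation>__<model>` naming convention used by the other evaluation-run datasets in this collection, and the config name comes from this record's metadata, so treat both as assumptions rather than verified values:

```python
from datasets import load_dataset

# Assumed repo id, following the details_<org>__<model> convention of the
# Open LLM Leaderboard evaluation-run datasets; "harness_winogrande_5" is one
# of the config names listed in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8-1.1b",
    "harness_winogrande_5",
    split="train",
)
```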
[ "# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8-1.1b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8-1.1b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T11:30:41.082288(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8-1.1b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8-1.1b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T11:30:41.082288(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
17669b481f2cc56ac5efb401ababf4b7bdaf345c
This is a dataset created using [vector-io](https://github.com/ai-northstar-tech/vector-io)
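The card itself carries no usage notes, so the following is only a hedged sketch of how the exported files could be pulled locally with `huggingface_hub` before re-importing them with vector-io's own tooling; the local directory name is arbitrary:

```python
from huggingface_hub import snapshot_download

# Download the exported vector dataset (embeddings plus metadata files) from the Hub.
local_path = snapshot_download(
    repo_id="aintech/vdf_wolt_food",
    repo_type="dataset",
    local_dir="vdf_wolt_food",  # arbitrary local directory
)
print(local_path)
```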
aintech/vdf_wolt_food
[ "vdf", "vector-io", "vector-dataset", "vector-embeddings", "region:us" ]
2024-01-23T11:50:43+00:00
{"tags": ["vdf", "vector-io", "vector-dataset", "vector-embeddings"]}
2024-01-25T10:42:26+00:00
[]
[]
TAGS #vdf #vector-io #vector-dataset #vector-embeddings #region-us
This is a dataset created using vector-io
[]
[ "TAGS\n#vdf #vector-io #vector-dataset #vector-embeddings #region-us \n" ]
271748ed2da158cd8aa2cf30c6ec15a81e437321
## sample_mflix.embedded_movies

This data set contains details on movies with genres of Western, Action, or Fantasy. Each document contains a single movie, and information such as its title, release year, and cast.

In addition, documents in this collection include a plot_embedding field that contains embeddings created using OpenAI's text-embedding-ada-002 embedding model that you can use with the Atlas Search vector search feature.

## Overview

This dataset offers a comprehensive collection of data on various movies. It includes details such as plot summaries, genres, runtime, ratings, cast, and more. This dataset is ideal for movie recommendation systems, film analysis, and educational purposes in film studies.

## Dataset Structure

Each record in the dataset represents a movie and includes the following fields:

- `_id`: A unique identifier for the movie.
- `plot`: A brief summary of the movie's plot.
- `genres`: A list of genres associated with the movie.
- `runtime`: The runtime of the movie in minutes.
- `rated`: The MPAA rating of the movie.
- `cast`: A list of main actors in the movie.
- `num_mflix_comments`: The number of comments on the movie in the mflix platform.
- `poster`: A URL to the movie's poster image.
- `title`: The title of the movie.
- `lastupdated`: The last date and time when the movie information was updated.
- `languages`: The languages available in the movie.
- `released`: The release date of the movie.
- `directors`: A list of directors of the movie.
- `writers`: A list of writers of the movie.
- `awards`: Information about awards won and nominations.
- `year`: The release year of the movie.
- `imdb`: IMDb rating, votes, and ID.
- `countries`: A list of countries where the movie was produced.
- `type`: The type of record, in this case, `movie`.
- `tomatoes`: Ratings and reviews from Rotten Tomatoes.
- `plot_embedding`: An array of numerical values representing the plot embedding.

## Field Details

### Awards Object

- `wins`: The number of awards won.
- `nominations`: The number of awards the movie was nominated for.
- `text`: A text summary of the awards and nominations.

### IMDb Object

- `rating`: The IMDb rating.
- `votes`: The number of votes on IMDb.
- `id`: The IMDb ID of the movie.

### Tomatoes Object

- Contains viewer and critic ratings, reviews count, DVD release date, and production details.

### Plot Embedding

- An array representing a numerical embedding of the movie's plot. Useful for machine learning applications, like content-based filtering in recommendation systems.

## Usage

The dataset is suited for a range of applications, including:

- Analyzing trends in film genres and ratings over time.
- Building movie recommendation engines using plot embeddings and genres.
- Studying the correlation between cast/directors and movie success.
- Educational purposes in film studies and data analysis courses.

## Notes

- The data is provided as-is and intended for informational and educational purposes.
- Users should verify the accuracy of the information for any critical use-cases.
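For a quick look at the records without a MongoDB deployment, the collection can also be loaded straight from the Hugging Face Hub. This is a minimal sketch: the fields follow the schema above, the "train" split is the one iterated by the ingest script further down, and the 1536-value length mentioned in the comment reflects the text-embedding-ada-002 model rather than anything stated in this card.

```python
from datasets import load_dataset

# Load the collection as a Hugging Face dataset.
movies = load_dataset("AIatMongoDB/embedded_movies", split="train")

sample = movies[0]
print(sample["title"], sample["genres"])
print(len(sample["plot_embedding"]))  # expected 1536 for text-embedding-ada-002 (assumption)
```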
### Sample Document

```
{
  "_id": { "$oid": "573a1396f29313caabce582d" },
  "plot": "A young swordsman comes to Paris and faces villains, romance, adventure and intrigue with three Musketeer friends.",
  "genres": ["Action", "Adventure", "Comedy"],
  "runtime": { "$numberInt": "106" },
  "rated": "PG",
  "cast": ["Oliver Reed", "Raquel Welch", "Richard Chamberlain", "Michael York"],
  "num_mflix_comments": { "$numberInt": "0" },
  "poster": "https://m.media-amazon.com/images/M/MV5BODQwNmI0MDctYzA5Yy00NmJkLWIxNGMtYzgyMDBjMTU0N2IyXkEyXkFqcGdeQXVyMjI4MjA5MzA@._V1_SY1000_SX677_AL_.jpg",
  "title": "The Three Musketeers",
  "lastupdated": "2015-09-16 06:21:07.210000000",
  "languages": ["English"],
  "released": { "$date": { "$numberLong": "133747200000" } },
  "directors": ["Richard Lester"],
  "writers": ["George MacDonald Fraser (screenplay)", "Alexandre Dumas père (novel)"],
  "awards": {
    "wins": { "$numberInt": "4" },
    "nominations": { "$numberInt": "7" },
    "text": "Won 1 Golden Globe. Another 3 wins & 7 nominations."
  },
  "year": { "$numberInt": "1973" },
  "imdb": {
    "rating": { "$numberDouble": "7.3" },
    "votes": { "$numberInt": "11502" },
    "id": { "$numberInt": "72281" }
  },
  "countries": ["Spain", "USA", "Panama", "UK"],
  "type": "movie",
  "tomatoes": {
    "viewer": { "rating": { "$numberDouble": "3.5" }, "numReviews": { "$numberInt": "9600" }, "meter": { "$numberInt": "78" } },
    "dvd": { "$date": { "$numberLong": "982022400000" } },
    "critic": { "rating": { "$numberDouble": "7.1" }, "numReviews": { "$numberInt": "11" }, "meter": { "$numberInt": "82" } },
    "lastUpdated": { "$date": { "$numberLong": "1441307415000" } },
    "rotten": { "$numberInt": "2" },
    "production": "Live Home Video",
    "fresh": { "$numberInt": "9" }
  },
  "plot_embedding": [-0.004237316, -0.022958077, -0.005921211, -0.020323543, 0.010051459]
}
```

## Ingest Data

The small script `ingest.py` can be used to load the data into your MongoDB Atlas cluster.

```
pip install pymongo
pip install datasets

# export MONGODB_ATLAS_URI=<your atlas uri>
```

The `ingest.py` script:

```python
import os

from bson import json_util
from datasets import load_dataset
from pymongo import MongoClient

# Connect to the Atlas cluster and target sample_mflix.embedded_movies.
uri = os.environ.get('MONGODB_ATLAS_URI')
client = MongoClient(uri)
db_name = 'sample_mflix'
collection_name = 'embedded_movies'
embedded_movies_collection = client[db_name][collection_name]

# Pull the dataset from the Hugging Face Hub.
dataset = load_dataset("AIatMongoDB/embedded_movies")

# Insert documents in batches of 1000.
insert_data = []
for movie in dataset['train']:
    # Normalize each record through bson.json_util before inserting.
    doc_movie = json_util.loads(json_util.dumps(movie))
    insert_data.append(doc_movie)
    if len(insert_data) == 1000:
        embedded_movies_collection.insert_many(insert_data)
        print("1000 records ingested")
        insert_data = []

# Flush any remaining documents.
if len(insert_data) > 0:
    embedded_movies_collection.insert_many(insert_data)
    insert_data = []
    print("Data Ingested")
```
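Once the collection is populated, the stored plot embeddings can be queried with Atlas Vector Search. The sketch below is an illustration rather than part of the original card: it assumes a vector search index named `vector_index` has already been created on the `plot_embedding` field, and it reuses an existing document's embedding as the query vector (in practice you would embed the query text with the same text-embedding-ada-002 model).

```python
# Assumes `embedded_movies_collection` from ingest.py above and an Atlas Vector
# Search index named "vector_index" on the "plot_embedding" field (assumption).
seed = embedded_movies_collection.find_one(
    {"plot_embedding": {"$exists": True}}, {"plot_embedding": 1}
)
query_vector = seed["plot_embedding"]

pipeline = [
    {
        "$vectorSearch": {
            "index": "vector_index",   # assumed index name
            "path": "plot_embedding",
            "queryVector": query_vector,
            "numCandidates": 150,
            "limit": 5,
        }
    },
    # Keep only the fields needed for display, plus the similarity score.
    {"$project": {"_id": 0, "title": 1, "plot": 1, "score": {"$meta": "vectorSearchScore"}}},
]

for doc in embedded_movies_collection.aggregate(pipeline):
    print(round(doc["score"], 4), doc["title"])
```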
AIatMongoDB/embedded_movies
[ "language:en", "license:apache-2.0", "movies", "region:us" ]
2024-01-23T12:04:06+00:00
{"language": ["en"], "license": "apache-2.0", "tags": ["movies"]}
2024-02-11T10:06:33+00:00
[]
[ "en" ]
TAGS #language-English #license-apache-2.0 #movies #region-us
## sample_mflix.embedded_movies This data set contains details on movies with genres of Western, Action, or Fantasy. Each document contains a single movie, and information such as its title, release year, and cast. In addition, documents in this collection include a plot_embedding field that contains embeddings created using OpenAI's text-embedding-ada-002 embedding model that you can use with the Atlas Search vector search feature. ## Overview This dataset offers a comprehensive collection of data on various movies. It includes details such as plot summaries, genres, runtime, ratings, cast, and more. This dataset is ideal for movie recommendation systems, film analysis, and educational purposes in film studies. ## Dataset Structure Each record in the dataset represents a movie and includes the following fields: - '_id': A unique identifier for the movie. - 'plot': A brief summary of the movie's plot. - 'genres': A list of genres associated with the movie. - 'runtime': The runtime of the movie in minutes. - 'rated': The MPAA rating of the movie. - 'cast': A list of main actors in the movie. - 'num_mflix_comments': The number of comments on the movie in the mflix platform. - 'poster': A URL to the movie's poster image. - 'title': The title of the movie. - 'lastupdated': The last date and time when the movie information was updated. - 'languages': The languages available in the movie. - 'released': The release date of the movie. - 'directors': A list of directors of the movie. - 'writers': A list of writers of the movie. - 'awards': Information about awards won and nominations. - 'year': The release year of the movie. - 'imdb': IMDb rating, votes, and ID. - 'countries': A list of countries where the movie was produced. - 'type': The type of record, in this case, 'movie'. - 'tomatoes': Ratings and reviews from Rotten Tomatoes. - 'plot_embedding': An array of numerical values representing the plot embedding. ## Field Details ### Awards Object - 'wins': The number of awards won. - 'nominations': The number of awards the movie was nominated for. - 'text': A text summary of the awards and nominations. ### IMDb Object - 'rating': The IMDb rating. - 'votes': The number of votes on IMDb. - 'id': The IMDb ID of the movie. ### Tomatoes Object - Contains viewer and critic ratings, reviews count, DVD release date, and production details. ### Plot Embedding - An array representing a numerical embedding of the movie's plot. Useful for machine learning applications, like content-based filtering in recommendation systems. ## Usage The dataset is suited for a range of applications, including: - Analyzing trends in film genres and ratings over time. - Building movie recommendation engines using plot embeddings and genres. - Studying the correlation between cast/directors and movie success. - Educational purposes in film studies and data analysis courses. ## Notes - The data is provided as-is and intended for informational and educational purposes. - Users should verify the accuracy of the information for any critical use-cases. ### Sample Document ## Ingest Data The small script 'URL' can be used to load the data into your MongoDB Atlas cluster. The 'URL':
[ "## sample_mflix.embedded_movies\n\nThis data set contains details on movies with genres of Western, Action, or Fantasy. Each document contains a single movie, and information such as its title, release year, and cast.\n\nIn addition, documents in this collection include a plot_embedding field that contains embeddings created using OpenAI's text-embedding-ada-002 embedding model that you can use with the Atlas Search vector search feature.", "## Overview\n\nThis dataset offers a comprehensive collection of data on various movies. It includes details such as plot summaries, genres, runtime, ratings, cast, and more. This dataset is ideal for movie recommendation systems, film analysis, and educational purposes in film studies.", "## Dataset Structure\n\nEach record in the dataset represents a movie and includes the following fields:\n\n- '_id': A unique identifier for the movie.\n- 'plot': A brief summary of the movie's plot.\n- 'genres': A list of genres associated with the movie.\n- 'runtime': The runtime of the movie in minutes.\n- 'rated': The MPAA rating of the movie.\n- 'cast': A list of main actors in the movie.\n- 'num_mflix_comments': The number of comments on the movie in the mflix platform.\n- 'poster': A URL to the movie's poster image.\n- 'title': The title of the movie.\n- 'lastupdated': The last date and time when the movie information was updated.\n- 'languages': The languages available in the movie.\n- 'released': The release date of the movie.\n- 'directors': A list of directors of the movie.\n- 'writers': A list of writers of the movie.\n- 'awards': Information about awards won and nominations.\n- 'year': The release year of the movie.\n- 'imdb': IMDb rating, votes, and ID.\n- 'countries': A list of countries where the movie was produced.\n- 'type': The type of record, in this case, 'movie'.\n- 'tomatoes': Ratings and reviews from Rotten Tomatoes.\n- 'plot_embedding': An array of numerical values representing the plot embedding.", "## Field Details", "### Awards Object\n\n- 'wins': The number of awards won.\n- 'nominations': The number of awards the movie was nominated for.\n- 'text': A text summary of the awards and nominations.", "### IMDb Object\n\n- 'rating': The IMDb rating.\n- 'votes': The number of votes on IMDb.\n- 'id': The IMDb ID of the movie.", "### Tomatoes Object\n\n- Contains viewer and critic ratings, reviews count, DVD release date, and production details.", "### Plot Embedding\n\n- An array representing a numerical embedding of the movie's plot. Useful for machine learning applications, like content-based filtering in recommendation systems.", "## Usage\n\nThe dataset is suited for a range of applications, including:\n\n- Analyzing trends in film genres and ratings over time.\n- Building movie recommendation engines using plot embeddings and genres.\n- Studying the correlation between cast/directors and movie success.\n- Educational purposes in film studies and data analysis courses.", "## Notes\n\n- The data is provided as-is and intended for informational and educational purposes.\n- Users should verify the accuracy of the information for any critical use-cases.", "### Sample Document", "## Ingest Data\n\nThe small script 'URL' can be used to load the data into your MongoDB Atlas cluster. \n\n\nThe 'URL':" ]
[ "TAGS\n#language-English #license-apache-2.0 #movies #region-us \n", "## sample_mflix.embedded_movies\n\nThis data set contains details on movies with genres of Western, Action, or Fantasy. Each document contains a single movie, and information such as its title, release year, and cast.\n\nIn addition, documents in this collection include a plot_embedding field that contains embeddings created using OpenAI's text-embedding-ada-002 embedding model that you can use with the Atlas Search vector search feature.", "## Overview\n\nThis dataset offers a comprehensive collection of data on various movies. It includes details such as plot summaries, genres, runtime, ratings, cast, and more. This dataset is ideal for movie recommendation systems, film analysis, and educational purposes in film studies.", "## Dataset Structure\n\nEach record in the dataset represents a movie and includes the following fields:\n\n- '_id': A unique identifier for the movie.\n- 'plot': A brief summary of the movie's plot.\n- 'genres': A list of genres associated with the movie.\n- 'runtime': The runtime of the movie in minutes.\n- 'rated': The MPAA rating of the movie.\n- 'cast': A list of main actors in the movie.\n- 'num_mflix_comments': The number of comments on the movie in the mflix platform.\n- 'poster': A URL to the movie's poster image.\n- 'title': The title of the movie.\n- 'lastupdated': The last date and time when the movie information was updated.\n- 'languages': The languages available in the movie.\n- 'released': The release date of the movie.\n- 'directors': A list of directors of the movie.\n- 'writers': A list of writers of the movie.\n- 'awards': Information about awards won and nominations.\n- 'year': The release year of the movie.\n- 'imdb': IMDb rating, votes, and ID.\n- 'countries': A list of countries where the movie was produced.\n- 'type': The type of record, in this case, 'movie'.\n- 'tomatoes': Ratings and reviews from Rotten Tomatoes.\n- 'plot_embedding': An array of numerical values representing the plot embedding.", "## Field Details", "### Awards Object\n\n- 'wins': The number of awards won.\n- 'nominations': The number of awards the movie was nominated for.\n- 'text': A text summary of the awards and nominations.", "### IMDb Object\n\n- 'rating': The IMDb rating.\n- 'votes': The number of votes on IMDb.\n- 'id': The IMDb ID of the movie.", "### Tomatoes Object\n\n- Contains viewer and critic ratings, reviews count, DVD release date, and production details.", "### Plot Embedding\n\n- An array representing a numerical embedding of the movie's plot. Useful for machine learning applications, like content-based filtering in recommendation systems.", "## Usage\n\nThe dataset is suited for a range of applications, including:\n\n- Analyzing trends in film genres and ratings over time.\n- Building movie recommendation engines using plot embeddings and genres.\n- Studying the correlation between cast/directors and movie success.\n- Educational purposes in film studies and data analysis courses.", "## Notes\n\n- The data is provided as-is and intended for informational and educational purposes.\n- Users should verify the accuracy of the information for any critical use-cases.", "### Sample Document", "## Ingest Data\n\nThe small script 'URL' can be used to load the data into your MongoDB Atlas cluster. \n\n\nThe 'URL':" ]
0d992da324c9110011c0371c3bda169a2ec9f389
# Dataset Card for Evaluation run of xformAI/opt-125m-gqa-ub-6-best-for-KV-cache <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [xformAI/opt-125m-gqa-ub-6-best-for-KV-cache](https://huggingface.co/xformAI/opt-125m-gqa-ub-6-best-for-KV-cache) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T12:11:33.435491](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache/blob/main/results_2024-01-23T12-11-33.435491.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23214395574495633, "acc_stderr": 0.029929161673252165, "acc_norm": 0.23167592940331871, "acc_norm_stderr": 0.030715935929569317, "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080515, "mc2": 0.4953131184469278, "mc2_stderr": 0.016004347037417377 }, "harness|arc:challenge|25": { "acc": 0.20819112627986347, "acc_stderr": 0.011864866118448069, "acc_norm": 0.24232081911262798, "acc_norm_stderr": 0.012521593295800118 }, "harness|hellaswag|10": { "acc": 0.2590121489743079, "acc_stderr": 0.004371969542814558, "acc_norm": 0.24995020912168892, "acc_norm_stderr": 0.004320990543283153 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, 
"acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 
0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 
0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080515, "mc2": 0.4953131184469278, "mc2_stderr": 0.016004347037417377 }, "harness|winogrande|5": { "acc": 0.5169692186266772, "acc_stderr": 0.014044390401612974 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache
[ "region:us" ]
2024-01-23T12:08:01+00:00
{"pretty_name": "Evaluation run of xformAI/opt-125m-gqa-ub-6-best-for-KV-cache", "dataset_summary": "Dataset automatically created during the evaluation run of model [xformAI/opt-125m-gqa-ub-6-best-for-KV-cache](https://huggingface.co/xformAI/opt-125m-gqa-ub-6-best-for-KV-cache) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T12:11:33.435491](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache/blob/main/results_2024-01-23T12-11-33.435491.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23214395574495633,\n \"acc_stderr\": 0.029929161673252165,\n \"acc_norm\": 0.23167592940331871,\n \"acc_norm_stderr\": 0.030715935929569317,\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080515,\n \"mc2\": 0.4953131184469278,\n \"mc2_stderr\": 0.016004347037417377\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.20819112627986347,\n \"acc_stderr\": 0.011864866118448069,\n \"acc_norm\": 0.24232081911262798,\n \"acc_norm_stderr\": 0.012521593295800118\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2590121489743079,\n \"acc_stderr\": 0.004371969542814558,\n \"acc_norm\": 0.24995020912168892,\n \"acc_norm_stderr\": 0.004320990543283153\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 
0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n 
\"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080515,\n \"mc2\": 0.4953131184469278,\n \"mc2_stderr\": 0.016004347037417377\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5169692186266772,\n \"acc_stderr\": 0.014044390401612974\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n 
\"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/xformAI/opt-125m-gqa-ub-6-best-for-KV-cache", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-06-15.262886.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-06-15.262886.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-11-33.435491.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-11-33.435491.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-11-33.435491.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-11-33.435491.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-11-33.435491.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": 
"2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-06-15.262886.parquet"]}, 
{"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["**/details_harness|winogrande|5_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": ["**/details_harness|winogrande|5_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T12-11-33.435491.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T12_06_15.262886", "path": ["results_2024-01-23T12-06-15.262886.parquet"]}, {"split": "2024_01_23T12_11_33.435491", "path": 
["results_2024-01-23T12-11-33.435491.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T12-11-33.435491.parquet"]}]}]}
2024-01-23T12:13:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of xformAI/opt-125m-gqa-ub-6-best-for-KV-cache Dataset automatically created during the evaluation run of model xformAI/opt-125m-gqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T12:11:33.435491 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
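The card text above ends its loading instructions at "you can for instance do the following:" without reproducing the snippet in this flattened view. A minimal sketch, assuming the repo id follows the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern and using the `harness_winogrande_5` configuration and `latest` split listed in the metadata above:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_xformAI__opt-125m-gqa-ub-6-best-for-KV-cache",  # assumed repo id
    "harness_winogrande_5",  # per-task configuration named in the metadata above
    split="latest",          # alias that always points at the most recent run
)
print(data)
```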
[ "# Dataset Card for Evaluation run of xformAI/opt-125m-gqa-ub-6-best-for-KV-cache\n\n\n\nDataset automatically created during the evaluation run of model xformAI/opt-125m-gqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:11:33.435491(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of xformAI/opt-125m-gqa-ub-6-best-for-KV-cache\n\n\n\nDataset automatically created during the evaluation run of model xformAI/opt-125m-gqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:11:33.435491(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3e956eddc997d390fa9ac403f7111d4803ec7837
# Dataset Card for "SlimOrca-tr" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
malhajar/SlimOrca-tr
[ "region:us" ]
2024-01-23T12:19:37+00:00
{"dataset_info": {"features": [{"name": "conversations", "dtype": "string"}, {"name": "conversations-turkish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2017451170, "num_examples": 517518}], "download_size": 1025268484, "dataset_size": 2017451170}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-23T12:30:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "SlimOrca-tr" More Information needed
[ "# Dataset Card for \"SlimOrca-tr\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"SlimOrca-tr\"\n\nMore Information needed" ]
7ec4aceb2a5794f97550e34a22c3db0da21fd0e2
# Dataset Card for Evaluation run of uukuguy/speechless-zephyr-code-functionary-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [uukuguy/speechless-zephyr-code-functionary-7b](https://huggingface.co/uukuguy/speechless-zephyr-code-functionary-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T12:18:03.845597](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b/blob/main/results_2024-01-23T12-18-03.845597.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6448787410516414, "acc_stderr": 0.03224509099981482, "acc_norm": 0.649790965396691, "acc_norm_stderr": 0.032889685494221804, "mc1": 0.30599755201958384, "mc1_stderr": 0.01613222972815504, "mc2": 0.44994239471775765, "mc2_stderr": 0.014464163311832926 }, "harness|arc:challenge|25": { "acc": 0.5878839590443686, "acc_stderr": 0.014383915302225403, "acc_norm": 0.6151877133105802, "acc_norm_stderr": 0.014218371065251102 }, "harness|hellaswag|10": { "acc": 0.6409081856203943, "acc_stderr": 0.004787537385153, "acc_norm": 0.8387771360286795, "acc_norm_stderr": 0.0036698484004877773 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.03842498559395268, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.03842498559395268 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.02794321998933714, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.02794321998933714 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, 
"acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.03656343653353159, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.03656343653353159 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266346, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3941798941798942, "acc_stderr": 0.02516798233389414, "acc_norm": 0.3941798941798942, "acc_norm_stderr": 0.02516798233389414 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5369458128078818, "acc_stderr": 0.035083705204426656, "acc_norm": 0.5369458128078818, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.023854795680971128, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.023854795680971128 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616258, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616258 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6596638655462185, "acc_stderr": 0.030778057422931673, "acc_norm": 0.6596638655462185, "acc_norm_stderr": 0.030778057422931673 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.3973509933774834, "acc_stderr": 0.0399552400768168, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.0399552400768168 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8165137614678899, "acc_stderr": 0.0165952597103993, "acc_norm": 0.8165137614678899, "acc_norm_stderr": 0.0165952597103993 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5370370370370371, "acc_stderr": 0.03400603625538272, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654373, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654373 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080851, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7085201793721974, "acc_stderr": 0.03050028317654585, "acc_norm": 0.7085201793721974, "acc_norm_stderr": 0.03050028317654585 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159463, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7975460122699386, "acc_stderr": 0.03157065078911901, "acc_norm": 0.7975460122699386, "acc_norm_stderr": 0.03157065078911901 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822585, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822585 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281386, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281386 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8186462324393359, "acc_stderr": 0.013778693778464076, "acc_norm": 0.8186462324393359, "acc_norm_stderr": 0.013778693778464076 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468358, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468358 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32513966480446926, "acc_stderr": 0.01566654278505356, "acc_norm": 0.32513966480446926, "acc_norm_stderr": 0.01566654278505356 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7516339869281046, "acc_stderr": 0.024739981355113592, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.024739981355113592 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188933, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188933 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.024288533637726095, "acc_norm": 
0.7438271604938271, "acc_norm_stderr": 0.024288533637726095 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6862745098039216, "acc_stderr": 0.01877168389352818, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.01877168389352818 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.01613222972815504, "mc2": 0.44994239471775765, "mc2_stderr": 0.014464163311832926 }, "harness|winogrande|5": { "acc": 0.7868981846882399, "acc_stderr": 0.011508957690722764 }, "harness|gsm8k|5": { "acc": 0.4382107657316149, "acc_stderr": 0.013666915917255072 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
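Besides the per-task snippet already shown in the card, the aggregated metrics live in the "results" configuration the card mentions. A minimal sketch for pulling them, assuming that configuration exposes the same `latest` split alias that the per-task configurations use in the metadata for this card:

```python
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b",
    "results",       # aggregated-results configuration mentioned in the card
    split="latest",  # assumed split alias, mirroring the per-task configurations
)
print(results.column_names)
```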
open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b
[ "region:us" ]
2024-01-23T12:20:23+00:00
{"pretty_name": "Evaluation run of uukuguy/speechless-zephyr-code-functionary-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [uukuguy/speechless-zephyr-code-functionary-7b](https://huggingface.co/uukuguy/speechless-zephyr-code-functionary-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T12:18:03.845597](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-zephyr-code-functionary-7b/blob/main/results_2024-01-23T12-18-03.845597.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6448787410516414,\n \"acc_stderr\": 0.03224509099981482,\n \"acc_norm\": 0.649790965396691,\n \"acc_norm_stderr\": 0.032889685494221804,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.01613222972815504,\n \"mc2\": 0.44994239471775765,\n \"mc2_stderr\": 0.014464163311832926\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5878839590443686,\n \"acc_stderr\": 0.014383915302225403,\n \"acc_norm\": 0.6151877133105802,\n \"acc_norm_stderr\": 0.014218371065251102\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6409081856203943,\n \"acc_stderr\": 0.004787537385153,\n \"acc_norm\": 0.8387771360286795,\n \"acc_norm_stderr\": 0.0036698484004877773\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.03842498559395268,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.03842498559395268\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933714,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933714\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n 
},\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266346,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3941798941798942,\n \"acc_stderr\": 0.02516798233389414,\n \"acc_norm\": 0.3941798941798942,\n \"acc_norm_stderr\": 0.02516798233389414\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 
0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.023854795680971128,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971128\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616258,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616258\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6596638655462185,\n \"acc_stderr\": 0.030778057422931673,\n \"acc_norm\": 0.6596638655462185,\n \"acc_norm_stderr\": 0.030778057422931673\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3973509933774834,\n \"acc_stderr\": 0.0399552400768168,\n \"acc_norm\": 0.3973509933774834,\n \"acc_norm_stderr\": 0.0399552400768168\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8165137614678899,\n \"acc_stderr\": 0.0165952597103993,\n \"acc_norm\": 0.8165137614678899,\n \"acc_norm_stderr\": 0.0165952597103993\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n \"acc_stderr\": 0.03050028317654585,\n \"acc_norm\": 0.7085201793721974,\n \"acc_norm_stderr\": 0.03050028317654585\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159463,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159463\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7975460122699386,\n \"acc_stderr\": 0.03157065078911901,\n \"acc_norm\": 0.7975460122699386,\n \"acc_norm_stderr\": 0.03157065078911901\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822585,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822585\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281386,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281386\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8186462324393359,\n \"acc_stderr\": 0.013778693778464076,\n \"acc_norm\": 0.8186462324393359,\n \"acc_norm_stderr\": 0.013778693778464076\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468358,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468358\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32513966480446926,\n \"acc_stderr\": 0.01566654278505356,\n \"acc_norm\": 0.32513966480446926,\n \"acc_norm_stderr\": 0.01566654278505356\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7516339869281046,\n \"acc_stderr\": 0.024739981355113592,\n \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.024739981355113592\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.01877168389352818,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.01877168389352818\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.01613222972815504,\n \"mc2\": 0.44994239471775765,\n \"mc2_stderr\": 0.014464163311832926\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7868981846882399,\n \"acc_stderr\": 0.011508957690722764\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.4382107657316149,\n \"acc_stderr\": 0.013666915917255072\n }\n}\n```", "repo_url": "https://huggingface.co/uukuguy/speechless-zephyr-code-functionary-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-18-03.845597.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-18-03.845597.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-18-03.845597.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-18-03.845597.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-18-03.845597.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["**/details_harness|winogrande|5_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T12-18-03.845597.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T12_18_03.845597", "path": ["results_2024-01-23T12-18-03.845597.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T12-18-03.845597.parquet"]}]}]}
2024-01-23T12:20:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of uukuguy/speechless-zephyr-code-functionary-7b Dataset automatically created during the evaluation run of model uukuguy/speechless-zephyr-code-functionary-7b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T12:18:03.845597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of uukuguy/speechless-zephyr-code-functionary-7b\n\n\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-zephyr-code-functionary-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:18:03.845597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of uukuguy/speechless-zephyr-code-functionary-7b\n\n\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-zephyr-code-functionary-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:18:03.845597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
0f0d8da20b676a079494c99ba3e6f006a5cb666f
# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache](https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T12:20:26.154798](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache/blob/main/results_2024-01-23T12-20-26.154798.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2319695967294965, "acc_stderr": 0.029926224399627676, "acc_norm": 0.23167761722970215, "acc_norm_stderr": 0.03071594565432422, "mc1": 0.22276621787025705, "mc1_stderr": 0.014566506961396743, "mc2": 0.484054603889482, "mc2_stderr": 0.01611164615468067 }, "harness|arc:challenge|25": { "acc": 0.19880546075085323, "acc_stderr": 0.011662850198175534, "acc_norm": 0.24232081911262798, "acc_norm_stderr": 0.012521593295800116 }, "harness|hellaswag|10": { "acc": 0.26249751045608444, "acc_stderr": 0.0043909233532005605, "acc_norm": 0.2500497908783111, "acc_norm_stderr": 0.004321564303822447 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 
0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 
0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.22276621787025705, "mc1_stderr": 0.014566506961396743, "mc2": 0.484054603889482, "mc2_stderr": 0.01611164615468067 }, "harness|winogrande|5": { "acc": 0.5122336227308603, "acc_stderr": 0.01404827882040562 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache
[ "region:us" ]
2024-01-23T12:22:07+00:00
{"pretty_name": "Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache", "dataset_summary": "Dataset automatically created during the evaluation run of model [xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache](https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T12:20:26.154798](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache/blob/main/results_2024-01-23T12-20-26.154798.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2319695967294965,\n \"acc_stderr\": 0.029926224399627676,\n \"acc_norm\": 0.23167761722970215,\n \"acc_norm_stderr\": 0.03071594565432422,\n \"mc1\": 0.22276621787025705,\n \"mc1_stderr\": 0.014566506961396743,\n \"mc2\": 0.484054603889482,\n \"mc2_stderr\": 0.01611164615468067\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.19880546075085323,\n \"acc_stderr\": 0.011662850198175534,\n \"acc_norm\": 0.24232081911262798,\n \"acc_norm_stderr\": 0.012521593295800116\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.26249751045608444,\n \"acc_stderr\": 0.0043909233532005605,\n \"acc_norm\": 0.2500497908783111,\n \"acc_norm_stderr\": 0.004321564303822447\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 
0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n 
\"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22276621787025705,\n \"mc1_stderr\": 0.014566506961396743,\n \"mc2\": 0.484054603889482,\n \"mc2_stderr\": 0.01611164615468067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5122336227308603,\n \"acc_stderr\": 0.01404827882040562\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-20-26.154798.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-20-26.154798.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-20-26.154798.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-20-26.154798.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-20-26.154798.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["**/details_harness|winogrande|5_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T12-20-26.154798.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T12_20_26.154798", "path": ["results_2024-01-23T12-20-26.154798.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T12-20-26.154798.parquet"]}]}]}
2024-01-23T12:22:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache Dataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T12:20:26.154798(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
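The flattened card text above ends with "you can for instance do the following:" but the accompanying snippet was stripped when the card was flattened. A minimal sketch of what that load would look like is given below; the repository id is an assumption inferred from the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the other evaluation cards in this dump, and `harness_winogrande_5` is one of the 63 task configurations listed in the metadata above.

```python
# Sketch only: the repository id below is inferred from the standard
# "open-llm-leaderboard/details_<org>__<model>" naming convention, not quoted from this card.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-KV-cache",
    "harness_winogrande_5",  # one of the 63 evaluated-task configurations
    split="train",           # the "train" split points at the latest results
)
print(data)
```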
[ "# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache\n\n\n\nDataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:20:26.154798(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache\n\n\n\nDataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-KV-cache on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:20:26.154798(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
06e987dbbca275baa9753996a45ec58ff98a5aa6
### Dataset Description Predict ChemicalConcentrations.csv given Observations.csv. This is a signal unmixing problem because the observations are a weighted sum of the chemical concentrations and pure spectra.
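The description models each observation as a weighted sum of chemical concentrations and pure spectra. A minimal illustrative sketch of that unmixing step is shown below, assuming the pure spectra are known; the card does not list such a file, so `PureSpectra.csv`, the array shapes, and the output file name are all hypothetical. If the pure spectra are not provided, a blind method such as non-negative matrix factorization would be needed instead.

```python
# Illustrative sketch of the mixing model: observations ≈ concentrations @ pure_spectra.
# "Observations.csv" and "ChemicalConcentrations.csv" come from the card;
# "PureSpectra.csv" and the column layout are assumptions made for this example.
import numpy as np
import pandas as pd
from scipy.optimize import nnls

observations = pd.read_csv("Observations.csv").to_numpy()   # (n_samples, n_channels)
pure_spectra = pd.read_csv("PureSpectra.csv").to_numpy()    # (n_chemicals, n_channels), hypothetical input

# For each observed spectrum, find non-negative concentrations c minimising
# || pure_spectra.T @ c - observation ||_2.
concentrations = np.vstack([nnls(pure_spectra.T, obs)[0] for obs in observations])

pd.DataFrame(concentrations).to_csv("PredictedChemicalConcentrations.csv", index=False)
```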
CelestineP/signalunmix
[ "task_categories:tabular-regression", "size_categories:n<1K", "chemistry", "region:us" ]
2024-01-23T12:25:16+00:00
{"size_categories": ["n<1K"], "task_categories": ["tabular-regression"], "tags": ["chemistry"]}
2024-01-23T13:12:33+00:00
[]
[]
TAGS #task_categories-tabular-regression #size_categories-n<1K #chemistry #region-us
### Dataset Description Predict URL given URL. This is a signal unmixing problem because the observations are a weighted sum of the chemical concentrations and pure spectra.
[ "### Dataset Description\n\nPredict URL given URL. This is a signal unmixing problem because the observations are a weighted sum of the chemical concentrations and pure spectra." ]
[ "TAGS\n#task_categories-tabular-regression #size_categories-n<1K #chemistry #region-us \n", "### Dataset Description\n\nPredict URL given URL. This is a signal unmixing problem because the observations are a weighted sum of the chemical concentrations and pure spectra." ]
d5c4dd500f9e64d603383297772ff02430c0b283
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T12:36:33.042804](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0/blob/main/results_2024-01-23T12-36-33.042804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6528243003198376, "acc_stderr": 0.03209119282314623, "acc_norm": 0.6520916850797007, "acc_norm_stderr": 0.032761861515248, "mc1": 0.576499388004896, "mc1_stderr": 0.017297421448534748, "mc2": 0.698336604934767, "mc2_stderr": 0.015067239704744356 }, "harness|arc:challenge|25": { "acc": 0.7175767918088737, "acc_stderr": 0.013155456884097222, "acc_norm": 0.7363481228668942, "acc_norm_stderr": 0.012875929151297044 }, "harness|hellaswag|10": { "acc": 0.7226648078072098, "acc_stderr": 0.004467684132772412, "acc_norm": 0.8892650866361282, "acc_norm_stderr": 0.003131622628199085 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7171052631578947, "acc_stderr": 0.03665349695640767, "acc_norm": 0.7171052631578947, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4126984126984127, "acc_stderr": 0.025355741263055277, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.025355741263055277 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268545, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268545 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229872, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229872 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657262, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657262 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.01555580271359017, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.01555580271359017 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.0263616516683891, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.0263616516683891 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903341, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903341 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7312138728323699, "acc_stderr": 0.023868003262500104, "acc_norm": 0.7312138728323699, "acc_norm_stderr": 0.023868003262500104 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.44692737430167595, "acc_stderr": 0.016628030039647614, "acc_norm": 0.44692737430167595, "acc_norm_stderr": 0.016628030039647614 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.0256468630971379, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.0256468630971379 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188936, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188936 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.023993501709042107, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 
0.023993501709042107 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, "acc_norm": 0.48226950354609927, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47131681877444587, "acc_stderr": 0.012749206007657476, "acc_norm": 0.47131681877444587, "acc_norm_stderr": 0.012749206007657476 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.028332959514031208, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.028332959514031208 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399673, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399673 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197771, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.576499388004896, "mc1_stderr": 0.017297421448534748, "mc2": 0.698336604934767, "mc2_stderr": 0.015067239704744356 }, "harness|winogrande|5": { "acc": 0.8397790055248618, "acc_stderr": 0.010309209498187479 }, "harness|gsm8k|5": { "acc": 0.6944655041698257, "acc_stderr": 0.012688134076726879 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
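In addition to the loading example already included in the card text, the aggregated "results" configuration and the per-task "latest" splits described there can be read the same way. A short sketch follows; the config and split names are taken from the card's own metadata listing, and network access to the Hub is assumed.

```python
# Sketch: reading the aggregated scores and one per-task split for this run.
from datasets import load_dataset

repo = "open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0"

# Aggregated metrics of the run (the "results" configuration described in the card).
results = load_dataset(repo, "results", split="latest")

# Per-example details for a single task, e.g. the 5-shot GSM8K harness.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")

print(results[0])
print(gsm8k_details[0])
```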
open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0
[ "region:us" ]
2024-01-23T12:38:47+00:00
{"pretty_name": "Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T12:36:33.042804](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0/blob/main/results_2024-01-23T12-36-33.042804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6528243003198376,\n \"acc_stderr\": 0.03209119282314623,\n \"acc_norm\": 0.6520916850797007,\n \"acc_norm_stderr\": 0.032761861515248,\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.017297421448534748,\n \"mc2\": 0.698336604934767,\n \"mc2_stderr\": 0.015067239704744356\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7175767918088737,\n \"acc_stderr\": 0.013155456884097222,\n \"acc_norm\": 0.7363481228668942,\n \"acc_norm_stderr\": 0.012875929151297044\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7226648078072098,\n \"acc_stderr\": 0.004467684132772412,\n \"acc_norm\": 0.8892650866361282,\n \"acc_norm_stderr\": 0.003131622628199085\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.025355741263055277,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.025355741263055277\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268545,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268545\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229872,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229872\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657262,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657262\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.01555580271359017,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.01555580271359017\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8250319284802043,\n \"acc_stderr\": 0.013586619219903341,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903341\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44692737430167595,\n \"acc_stderr\": 0.016628030039647614,\n \"acc_norm\": 0.44692737430167595,\n \"acc_norm_stderr\": 0.016628030039647614\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.0256468630971379,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.0256468630971379\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188936,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188936\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.023993501709042107,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.023993501709042107\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47131681877444587,\n \"acc_stderr\": 0.012749206007657476,\n \"acc_norm\": 0.47131681877444587,\n \"acc_norm_stderr\": 0.012749206007657476\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031208,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031208\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399673,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399673\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197771,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197771\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.017297421448534748,\n \"mc2\": 0.698336604934767,\n \"mc2_stderr\": 0.015067239704744356\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8397790055248618,\n \"acc_stderr\": 0.010309209498187479\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6944655041698257,\n \"acc_stderr\": 0.012688134076726879\n 
}\n}\n```", "repo_url": "https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-36-33.042804.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-36-33.042804.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-36-33.042804.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T12-36-33.042804.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-36-33.042804.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T12_36_33.042804", "path": ["**/details_harness|winogrande|5_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T12-36-33.042804.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T12_36_33.042804", "path": ["results_2024-01-23T12-36-33.042804.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T12-36-33.042804.parquet"]}]}]}
2024-01-23T12:39:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0 Dataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal example is sketched after this card text): ## Latest results These are the latest results from run 2024-01-23T12:36:33.042804 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
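For the loading step referenced above, a minimal sketch. The repository name is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming, and `harness_winogrande_5` is one of the configurations declared in the metadata below:

```python
from datasets import load_dataset

# Repository name assumed to follow the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v5.0",
    "harness_winogrande_5",
    split="train",
)
```

Any other configuration name from the list below (for example `harness_gsm8k_5`) can be substituted to load the details of the corresponding task.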
[ "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:36:33.042804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v5.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T12:36:33.042804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
258c6489620cc326480af2ba4102c81b58d03c86
# E5-finetune Dataset

E5-finetune Dataset is a curated collection of query-passage pairs, encompassing a total of 870k examples. This dataset is specifically designed for fine-tuning models to extend their input length capabilities from 512 tokens to 1024 tokens. The primary focus is on accumulating long-context passages.

## Dataset in English

The dataset samples long-context passage examples from various sources, ensuring a rich and diverse collection. The sources include:

- **SQuAD**: Approximately 80k examples. Adjacent passages have been merged to form longer passages, suitable for extended input length training.
- **Natural Questions**: a short-passage dataset
- **robust04**: A collection of (question, passage) pairs from news sources, filtered specifically to retain long-context examples.
- **wikihow**: (summary, passage) pairs from WikiHow
- **eli5**: a short-passage dataset

## Dataset in French

Existing French datasets are very limited, so an LLM-based generation method is used to expand the dataset. To generate a (question, passage) dataset with an LLM:

1. Gather a set of pure texts from different sources.
2. Ask the LLM to generate questions based on the given texts (a minimal sketch of this step is given after the domain lists below).

- **LLM generated examples**
  - **textbook and novels**: These sources provide a rich narrative and educational context, offering a wide range of topics and themes.
  - **wikipedia**: Wikipedia articles contribute significantly to the breadth of the dataset.
- **OpenSource examples**
  - **FQuAD**: A French question-answering dataset, known for its quality and reliability.
  - **Piaf**: A dataset tailored for question-answering systems, focusing on French language intricacies.
  - **wikihow**: The French version of WikiHow offers practical, instructional content, adding another dimension to the dataset.

## Dataset summary

| Source            | Language | Context Length | Num. examples |
|-------------------|----------|----------------|---------------|
| SQuAD             | en       | Mixed          | 80k           |
| Natural Questions | en       | Short          | 100k          |
| Robust 04         | en       | Long           | 130k          |
| wikihow           | en       | Mixed          | 130k          |
| eli5              | en       | Short          | 70k           |
| textbook/novels   | fr       | Mixed          | 190k          |
| wikipedia         | fr       | Mixed          | 90k           |
| FQuAD + Piaf      | fr       | Short          | 20k           |
| wikihow           | fr       | Mixed          | 60k           |

### Specific domains

The textbook dataset generated with the LLM in French covers a broad range of academic domains. The book titles are listed below, grouped by domain.

**History:**
- "Contre-histoire du libéralisme"
- "Histoire de l'Émigration pendant la Révolution Française"
- "Histoire de la littérature française."
- "Histoire des mouvements sociaux en France" - "Histoire du surréalisme" - "La guerre froide" - "Les Chaînes de l'Esclavage" - "Les Femmes Avant le Patriarcat" - "Patrimoine_ une histoire vraie" **Scientific:** - "Anthropologie" - "Classes préparatoires" - "Fondamentaux de la vie sociale" - "Histoire de la Physique et Chimie" - "Le carbone renouvelable" **Politics:** - "Capitalisme et liberté" - "Gouvernance Le management totalitaire" - "Introduction à l'économie politique" - "Introduction à la politique comparée" - "L anarchisme de droite" - "Le socialisme démocratique" - "Les relations internationales" **Medical:** - "Clinique de l'écriture" - "Introduction à l'étude de la médecine expérimentale" - "Physiologie et thérapie" **Economics and Finance:** - "Comprendre léconomie et la finance" - "Discours sur la Dette" - "Ecologie et capitalisme" - "Economie monétaire Théories et politiques" - "Etat du monde" - "Introduction à l'économie" - "Le Magicien de la finance" - "Les seigneurs de l'argent_ Des Médicis au Bitcoin" **Law:** - "Droit des contrats spéciaux" - "Droit international des relations diplomatiques" - "Droit pénal général" - "Le globe et la loi" **Literature:** - "Histoire littéraire d'Italie 4" - "La Préparation du roman" - "Le Démon de la théorie" - "Les Origines de la Culture" **Musical:** - " Introduction au langage musical " **Philosophy:** - "Introduction à la métaphysique de Maurice Blondel" - "Introduction à la pensée chinoise" - "Introduction à la philosophie analytics" - "Introduction à la philosophie de l'histoire" - "Libertés et droits fondamentaux" **Media:** - "Les médias sociaux en entreprise"
ProfessorBob/E5-finetune-dataset
[ "region:us" ]
2024-01-23T12:47:29+00:00
{"dataset_info": [{"config_name": "english", "features": [{"name": "query", "dtype": "string"}, {"name": "passage", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1147790406, "num_examples": 477830}, {"name": "test", "num_bytes": 137615402, "num_examples": 50232}], "download_size": 435028273, "dataset_size": 1285405808}, {"config_name": "fr", "features": [{"name": "query", "dtype": "string"}, {"name": "passage", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1112381997, "num_examples": 372410}], "download_size": 234237009, "dataset_size": 1112381997}], "configs": [{"config_name": "english", "data_files": [{"split": "train", "path": "english/train-*"}, {"split": "test", "path": "english/test-*"}]}, {"config_name": "fr", "data_files": [{"split": "train", "path": "fr/train-*"}]}]}
2024-01-30T12:02:09+00:00
[]
[]
TAGS #region-us
E5-finetune Dataset =================== E5-finetune Dataset is a curated collection of query-passage pairs, encompassing a total of 870k examples. This dataset is specifically designed for fine-tuning models to extend their input length capabilities from 512 tokens to 1024 tokens. The primary focus is on accumulating long-context passages. Dataset in English ------------------ The dataset samples long-context passage examples from various sources, ensuring a rich and diverse collection. The sources include: * SQuAD: Approximately 80k examples. Adjacent passages have been merged to form longer passages, suitable for extended input length training. * Natural Questions: a short-passage dataset * robust04: A collection of (question, passage) pairs from news sources, filtered specifically to retain long-context examples. * wikihow: (summary, passage) pairs from WikiHow * eli5: a short-passage dataset Dataset in French ----------------- Existing French datasets are very limited, so an LLM-based generation method is used to expand the dataset. To generate a (question, passage) dataset with an LLM: 1. Gather a set of pure texts from different sources. 2. Ask the LLM to generate questions based on the given texts. * LLM generated examples + textbook and novels: These sources provide a rich narrative and educational context, offering a wide range of topics and themes. + wikipedia: Wikipedia articles contribute significantly to the breadth of the dataset. * OpenSource examples + FQuAD: A French question-answering dataset, known for its quality and reliability. + Piaf: A dataset tailored for question-answering systems, focusing on French language intricacies. + wikihow: The French version of WikiHow offers practical, instructional content, adding another dimension to the dataset. Dataset summary --------------- ### Specific domains The textbook dataset generated with the LLM in French covers a broad range of academic domains. The book titles are listed below, grouped by domain. History: * "Contre-histoire du libéralisme" * "Histoire de l'Émigration pendant la Révolution Française" * "Histoire de la littérature française." 
* "Histoire des mouvements sociaux en France" * "Histoire du surréalisme" * "La guerre froide" * "Les Chaînes de l'Esclavage" * "Les Femmes Avant le Patriarcat" * "Patrimoine\_ une histoire vraie" Scientific: * "Anthropologie" * "Classes préparatoires" * "Fondamentaux de la vie sociale" * "Histoire de la Physique et Chimie" * "Le carbone renouvelable" Politics: * "Capitalisme et liberté" * "Gouvernance Le management totalitaire" * "Introduction à l'économie politique" * "Introduction à la politique comparée" * "L anarchisme de droite" * "Le socialisme démocratique" * "Les relations internationales" Medical: * "Clinique de l'écriture" * "Introduction à l'étude de la médecine expérimentale" * "Physiologie et thérapie" Economics and Finance: * "Comprendre léconomie et la finance" * "Discours sur la Dette" * "Ecologie et capitalisme" * "Economie monétaire Théories et politiques" * "Etat du monde" * "Introduction à l'économie" * "Le Magicien de la finance" * "Les seigneurs de l'argent\_ Des Médicis au Bitcoin" Law: * "Droit des contrats spéciaux" * "Droit international des relations diplomatiques" * "Droit pénal général" * "Le globe et la loi" Literature: * "Histoire littéraire d'Italie 4" * "La Préparation du roman" * "Le Démon de la théorie" * "Les Origines de la Culture" Musical: * " Introduction au langage musical " Philosophy: * "Introduction à la métaphysique de Maurice Blondel" * "Introduction à la pensée chinoise" * "Introduction à la philosophie analytics" * "Introduction à la philosophie de l'histoire" * "Libertés et droits fondamentaux" Media: * "Les médias sociaux en entreprise"
[ "### Specific doomains\n\n\nThe textbook dataset generated with LLM in French covers large academical domains. Here I list the name of the book grouped by its domain.\n\n\nHistory:\n\n\n* \"Contre-histoire du libéralisme\"\n* \"Histoire de l'Émigration pendant la Révolution Française\"\n* \"Histoire de la littérature française.\"\n* \"Histoire des mouvements sociaux en France\"\n* \"Histoire du surréalisme\"\n* \"La guerre froide\"\n* \"Les Chaînes de l'Esclavage\"\n* \"Les Femmes Avant le Patriarcat\"\n* \"Patrimoine\\_ une histoire vraie\"\n\n\nScientific:\n\n\n* \"Anthropologie\"\n* \"Classes préparatoires\"\n* \"Fondamentaux de la vie sociale\"\n* \"Histoire de la Physique et Chimie\"\n* \"Le carbone renouvelable\"\n\n\nPolitics:\n\n\n* \"Capitalisme et liberté\"\n* \"Gouvernance Le management totalitaire\"\n* \"Introduction à l'économie politique\"\n* \"Introduction à la politique comparée\"\n* \"L anarchisme de droite\"\n* \"Le socialisme démocratique\"\n* \"Les relations internationales\"\n\n\nMedical:\n\n\n* \"Clinique de l'écriture\"\n* \"Introduction à l'étude de la médecine expérimentale\"\n* \"Physiologie et thérapie\"\n\n\nEconomics and Finance:\n\n\n* \"Comprendre léconomie et la finance\"\n* \"Discours sur la Dette\"\n* \"Ecologie et capitalisme\"\n* \"Economie monétaire Théories et politiques\"\n* \"Etat du monde\"\n* \"Introduction à l'économie\"\n* \"Le Magicien de la finance\"\n* \"Les seigneurs de l'argent\\_ Des Médicis au Bitcoin\"\n\n\nLaw:\n\n\n* \"Droit des contrats spéciaux\"\n* \"Droit international des relations diplomatiques\"\n* \"Droit pénal général\"\n* \"Le globe et la loi\"\n\n\nLiterature:\n\n\n* \"Histoire littéraire d'Italie 4\"\n* \"La Préparation du roman\"\n* \"Le Démon de la théorie\"\n* \"Les Origines de la Culture\"\n\n\nMusical:\n\n\n* \" Introduction au langage musical \"\n\n\nPhilosophy:\n\n\n* \"Introduction à la métaphysique de Maurice Blondel\"\n* \"Introduction à la pensée chinoise\"\n* \"Introduction à la philosophie analytics\"\n* \"Introduction à la philosophie de l'histoire\"\n* \"Libertés et droits fondamentaux\"\n\n\nMedia:\n\n\n* \"Les médias sociaux en entreprise\"" ]
[ "TAGS\n#region-us \n", "### Specific doomains\n\n\nThe textbook dataset generated with LLM in French covers large academical domains. Here I list the name of the book grouped by its domain.\n\n\nHistory:\n\n\n* \"Contre-histoire du libéralisme\"\n* \"Histoire de l'Émigration pendant la Révolution Française\"\n* \"Histoire de la littérature française.\"\n* \"Histoire des mouvements sociaux en France\"\n* \"Histoire du surréalisme\"\n* \"La guerre froide\"\n* \"Les Chaînes de l'Esclavage\"\n* \"Les Femmes Avant le Patriarcat\"\n* \"Patrimoine\\_ une histoire vraie\"\n\n\nScientific:\n\n\n* \"Anthropologie\"\n* \"Classes préparatoires\"\n* \"Fondamentaux de la vie sociale\"\n* \"Histoire de la Physique et Chimie\"\n* \"Le carbone renouvelable\"\n\n\nPolitics:\n\n\n* \"Capitalisme et liberté\"\n* \"Gouvernance Le management totalitaire\"\n* \"Introduction à l'économie politique\"\n* \"Introduction à la politique comparée\"\n* \"L anarchisme de droite\"\n* \"Le socialisme démocratique\"\n* \"Les relations internationales\"\n\n\nMedical:\n\n\n* \"Clinique de l'écriture\"\n* \"Introduction à l'étude de la médecine expérimentale\"\n* \"Physiologie et thérapie\"\n\n\nEconomics and Finance:\n\n\n* \"Comprendre léconomie et la finance\"\n* \"Discours sur la Dette\"\n* \"Ecologie et capitalisme\"\n* \"Economie monétaire Théories et politiques\"\n* \"Etat du monde\"\n* \"Introduction à l'économie\"\n* \"Le Magicien de la finance\"\n* \"Les seigneurs de l'argent\\_ Des Médicis au Bitcoin\"\n\n\nLaw:\n\n\n* \"Droit des contrats spéciaux\"\n* \"Droit international des relations diplomatiques\"\n* \"Droit pénal général\"\n* \"Le globe et la loi\"\n\n\nLiterature:\n\n\n* \"Histoire littéraire d'Italie 4\"\n* \"La Préparation du roman\"\n* \"Le Démon de la théorie\"\n* \"Les Origines de la Culture\"\n\n\nMusical:\n\n\n* \" Introduction au langage musical \"\n\n\nPhilosophy:\n\n\n* \"Introduction à la métaphysique de Maurice Blondel\"\n* \"Introduction à la pensée chinoise\"\n* \"Introduction à la philosophie analytics\"\n* \"Introduction à la philosophie de l'histoire\"\n* \"Libertés et droits fondamentaux\"\n\n\nMedia:\n\n\n* \"Les médias sociaux en entreprise\"" ]
5f932b523b5fbd37c7221866d27e33cbd11df0ad
The split of SlimOrca without the Orca conversations.
NobodyExistsOnTheInternet/OrcaNoConvo
[ "license:mit", "region:us" ]
2024-01-23T12:47:34+00:00
{"license": "mit"}
2024-01-23T12:49:50+00:00
[]
[]
TAGS #license-mit #region-us
The split of SlimOrca without the Orca conversations.
[]
[ "TAGS\n#license-mit #region-us \n" ]
fb6f333f9ac3714679f38411db1a7d3fd0ca6b21
# Dataset Card for Evaluation run of Charlie911/MultiLora-drop-sharegpt <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Charlie911/MultiLora-drop-sharegpt](https://huggingface.co/Charlie911/MultiLora-drop-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T20:13:52.401722](https://huggingface.co/datasets/open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt/blob/main/results_2024-01-23T20-13-52.401722.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4103189833137969, "acc_stderr": 0.034440356205935406, "acc_norm": 0.4152994817197388, "acc_norm_stderr": 0.03526445965091106, "mc1": 0.3047735618115055, "mc1_stderr": 0.016114124156882455, "mc2": 0.44825405044907884, "mc2_stderr": 0.014892271476699756 }, "harness|arc:challenge|25": { "acc": 0.4445392491467577, "acc_stderr": 0.014521226405627077, "acc_norm": 0.4761092150170648, "acc_norm_stderr": 0.014594701798071655 }, "harness|hellaswag|10": { "acc": 0.49302927703644694, "acc_stderr": 0.004989296471157074, "acc_norm": 0.6597291376219877, "acc_norm_stderr": 0.0047283185778352246 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.04256193767901407, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.04256193767901407 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.039889037033362836, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.46037735849056605, "acc_stderr": 0.030676096599389188, "acc_norm": 0.46037735849056605, "acc_norm_stderr": 0.030676096599389188 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960718, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 
0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4046242774566474, "acc_stderr": 0.03742461193887248, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518754, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518754 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.33793103448275863, "acc_stderr": 0.0394170763206489, "acc_norm": 0.33793103448275863, "acc_norm_stderr": 0.0394170763206489 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.022644212615525214, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525214 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.49032258064516127, "acc_stderr": 0.02843867799890955, "acc_norm": 0.49032258064516127, "acc_norm_stderr": 0.02843867799890955 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.46060606060606063, "acc_stderr": 0.03892207016552012, "acc_norm": 0.46060606060606063, "acc_norm_stderr": 0.03892207016552012 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5, "acc_stderr": 0.035623524993954825, "acc_norm": 0.5, "acc_norm_stderr": 0.035623524993954825 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5233160621761658, "acc_stderr": 0.03604513672442202, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.03604513672442202 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.36923076923076925, "acc_stderr": 0.02446861524147891, "acc_norm": 0.36923076923076925, "acc_norm_stderr": 0.02446861524147891 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945277, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945277 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.032219436365661956, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.032219436365661956 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, 
"acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.4917431192660551, "acc_stderr": 0.021434399918214334, "acc_norm": 0.4917431192660551, "acc_norm_stderr": 0.021434399918214334 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.033448873829978666, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.033448873829978666 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5147058823529411, "acc_stderr": 0.035077938347913236, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.035077938347913236 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.569620253164557, "acc_stderr": 0.032230171959375976, "acc_norm": 0.569620253164557, "acc_norm_stderr": 0.032230171959375976 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.03244305283008731 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.04382094705550989, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.04382094705550989 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5041322314049587, "acc_stderr": 0.04564198767432754, "acc_norm": 0.5041322314049587, "acc_norm_stderr": 0.04564198767432754 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.04812917324536821, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.04812917324536821 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3987730061349693, "acc_stderr": 0.03847021420456026, "acc_norm": 0.3987730061349693, "acc_norm_stderr": 0.03847021420456026 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285714, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285714 }, "harness|hendrycksTest-management|5": { "acc": 0.5145631067961165, "acc_stderr": 0.049486373240266356, "acc_norm": 0.5145631067961165, "acc_norm_stderr": 0.049486373240266356 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5512820512820513, "acc_stderr": 0.032583346493868806, "acc_norm": 0.5512820512820513, "acc_norm_stderr": 0.032583346493868806 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5351213282247765, "acc_stderr": 0.017835798806290645, "acc_norm": 0.5351213282247765, "acc_norm_stderr": 0.017835798806290645 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.40173410404624277, "acc_stderr": 0.02639410417764363, "acc_norm": 0.40173410404624277, "acc_norm_stderr": 0.02639410417764363 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249594, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249594 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4444444444444444, "acc_stderr": 0.02845263998508801, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.02845263998508801 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.4630225080385852, "acc_stderr": 0.028320325830105908, "acc_norm": 0.4630225080385852, "acc_norm_stderr": 0.028320325830105908 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.404320987654321, "acc_stderr": 0.02730662529732769, "acc_norm": 0.404320987654321, "acc_norm_stderr": 0.02730662529732769 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3049645390070922, "acc_stderr": 0.027464708442022128, "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.027464708442022128 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3409387222946545, "acc_stderr": 0.01210681720306721, "acc_norm": 0.3409387222946545, "acc_norm_stderr": 0.01210681720306721 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4852941176470588, "acc_stderr": 0.03035969707904612, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.03035969707904612 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3366013071895425, "acc_stderr": 0.019117213911495158, "acc_norm": 0.3366013071895425, "acc_norm_stderr": 0.019117213911495158 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4, "acc_stderr": 0.0469237132203465, "acc_norm": 0.4, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5224489795918368, "acc_stderr": 0.031976941187136725, "acc_norm": 0.5224489795918368, "acc_norm_stderr": 0.031976941187136725 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5621890547263682, "acc_stderr": 0.0350808011219984, "acc_norm": 0.5621890547263682, "acc_norm_stderr": 0.0350808011219984 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-virology|5": { "acc": 0.3674698795180723, "acc_stderr": 0.03753267402120574, "acc_norm": 0.3674698795180723, "acc_norm_stderr": 0.03753267402120574 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5497076023391813, "acc_stderr": 0.038158273659132366, "acc_norm": 0.5497076023391813, "acc_norm_stderr": 0.038158273659132366 }, "harness|truthfulqa:mc|0": { "mc1": 0.3047735618115055, "mc1_stderr": 0.016114124156882455, "mc2": 0.44825405044907884, "mc2_stderr": 0.014892271476699756 }, "harness|winogrande|5": { "acc": 0.6606156274664562, "acc_stderr": 0.01330771492894175 }, "harness|gsm8k|5": { "acc": 0.06444275966641395, "acc_stderr": 0.006763391728488269 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
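The per-task scores in the "Latest results" block above use flat keys of the form `harness|<task>|<n-shot>`, so simple aggregates can be computed directly from the parsed JSON. Below is a minimal sketch (not part of the generated card; the local file name is an assumption) that averages accuracy over the MMLU (hendrycksTest) subtasks:

```python
import json

# Minimal sketch (not part of the original card; "results.json" is a
# hypothetical local copy of the "Latest results" JSON block shown above).
with open("results.json") as f:
    results = json.load(f)

# Per-task entries use flat keys of the form "harness|<task>|<n-shot>".
mmlu_accs = [
    entry["acc"]
    for key, entry in results.items()
    if key.startswith("harness|hendrycksTest-")
]

print(f"MMLU subtasks: {len(mmlu_accs)}")
print(f"Mean MMLU accuracy: {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```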
open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt
[ "region:us" ]
2024-01-23T13:12:08+00:00
{"pretty_name": "Evaluation run of Charlie911/MultiLora-drop-sharegpt", "dataset_summary": "Dataset automatically created during the evaluation run of model [Charlie911/MultiLora-drop-sharegpt](https://huggingface.co/Charlie911/MultiLora-drop-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T20:13:52.401722](https://huggingface.co/datasets/open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt/blob/main/results_2024-01-23T20-13-52.401722.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4103189833137969,\n \"acc_stderr\": 0.034440356205935406,\n \"acc_norm\": 0.4152994817197388,\n \"acc_norm_stderr\": 0.03526445965091106,\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.016114124156882455,\n \"mc2\": 0.44825405044907884,\n \"mc2_stderr\": 0.014892271476699756\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4445392491467577,\n \"acc_stderr\": 0.014521226405627077,\n \"acc_norm\": 0.4761092150170648,\n \"acc_norm_stderr\": 0.014594701798071655\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.49302927703644694,\n \"acc_stderr\": 0.004989296471157074,\n \"acc_norm\": 0.6597291376219877,\n \"acc_norm_stderr\": 0.0047283185778352246\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4148148148148148,\n \"acc_stderr\": 0.04256193767901407,\n \"acc_norm\": 0.4148148148148148,\n \"acc_norm_stderr\": 0.04256193767901407\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.40131578947368424,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.40131578947368424,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.46037735849056605,\n \"acc_stderr\": 0.030676096599389188,\n \"acc_norm\": 0.46037735849056605,\n \"acc_norm_stderr\": 0.030676096599389188\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3680555555555556,\n \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.3680555555555556,\n \"acc_norm_stderr\": 0.04032999053960718\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4046242774566474,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.4046242774566474,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.43829787234042555,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.43829787234042555,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n \"acc_stderr\": 0.03892431106518754,\n \"acc_norm\": 0.21929824561403508,\n \"acc_norm_stderr\": 0.03892431106518754\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.33793103448275863,\n \"acc_stderr\": 0.0394170763206489,\n \"acc_norm\": 0.33793103448275863,\n \"acc_norm_stderr\": 0.0394170763206489\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.022644212615525214,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.022644212615525214\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.49032258064516127,\n \"acc_stderr\": 0.02843867799890955,\n \"acc_norm\": 0.49032258064516127,\n \"acc_norm_stderr\": 0.02843867799890955\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.46060606060606063,\n \"acc_stderr\": 0.03892207016552012,\n \"acc_norm\": 0.46060606060606063,\n \"acc_norm_stderr\": 0.03892207016552012\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.035623524993954825,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.035623524993954825\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5233160621761658,\n \"acc_stderr\": 0.03604513672442202,\n \"acc_norm\": 0.5233160621761658,\n \"acc_norm_stderr\": 
0.03604513672442202\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.36923076923076925,\n \"acc_stderr\": 0.02446861524147891,\n \"acc_norm\": 0.36923076923076925,\n \"acc_norm_stderr\": 0.02446861524147891\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2814814814814815,\n \"acc_stderr\": 0.027420019350945277,\n \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.027420019350945277\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4369747899159664,\n \"acc_stderr\": 0.032219436365661956,\n \"acc_norm\": 0.4369747899159664,\n \"acc_norm_stderr\": 0.032219436365661956\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.4917431192660551,\n \"acc_stderr\": 0.021434399918214334,\n \"acc_norm\": 0.4917431192660551,\n \"acc_norm_stderr\": 0.021434399918214334\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4027777777777778,\n \"acc_stderr\": 0.033448873829978666,\n \"acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.033448873829978666\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.035077938347913236,\n \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.035077938347913236\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.569620253164557,\n \"acc_stderr\": 0.032230171959375976,\n \"acc_norm\": 0.569620253164557,\n \"acc_norm_stderr\": 0.032230171959375976\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3721973094170404,\n \"acc_stderr\": 0.03244305283008731,\n \"acc_norm\": 0.3721973094170404,\n \"acc_norm_stderr\": 0.03244305283008731\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5190839694656488,\n \"acc_stderr\": 0.04382094705550989,\n \"acc_norm\": 0.5190839694656488,\n \"acc_norm_stderr\": 0.04382094705550989\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5041322314049587,\n \"acc_stderr\": 0.04564198767432754,\n \"acc_norm\": 0.5041322314049587,\n \"acc_norm_stderr\": 0.04564198767432754\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.04812917324536821,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.04812917324536821\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3987730061349693,\n \"acc_stderr\": 0.03847021420456026,\n \"acc_norm\": 0.3987730061349693,\n \"acc_norm_stderr\": 0.03847021420456026\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285714,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285714\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5145631067961165,\n \"acc_stderr\": 0.049486373240266356,\n \"acc_norm\": 0.5145631067961165,\n \"acc_norm_stderr\": 0.049486373240266356\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5512820512820513,\n \"acc_stderr\": 0.032583346493868806,\n \"acc_norm\": 0.5512820512820513,\n \"acc_norm_stderr\": 0.032583346493868806\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5351213282247765,\n \"acc_stderr\": 0.017835798806290645,\n \"acc_norm\": 0.5351213282247765,\n \"acc_norm_stderr\": 0.017835798806290645\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.40173410404624277,\n \"acc_stderr\": 0.02639410417764363,\n \"acc_norm\": 0.40173410404624277,\n \"acc_norm_stderr\": 0.02639410417764363\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27262569832402234,\n \"acc_stderr\": 0.014893391735249594,\n \"acc_norm\": 0.27262569832402234,\n \"acc_norm_stderr\": 0.014893391735249594\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.02845263998508801,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.02845263998508801\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4630225080385852,\n \"acc_stderr\": 0.028320325830105908,\n \"acc_norm\": 0.4630225080385852,\n \"acc_norm_stderr\": 0.028320325830105908\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.404320987654321,\n \"acc_stderr\": 0.02730662529732769,\n \"acc_norm\": 0.404320987654321,\n \"acc_norm_stderr\": 0.02730662529732769\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3049645390070922,\n \"acc_stderr\": 0.027464708442022128,\n \"acc_norm\": 0.3049645390070922,\n \"acc_norm_stderr\": 0.027464708442022128\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3409387222946545,\n \"acc_stderr\": 0.01210681720306721,\n \"acc_norm\": 0.3409387222946545,\n \"acc_norm_stderr\": 0.01210681720306721\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4852941176470588,\n \"acc_stderr\": 0.03035969707904612,\n \"acc_norm\": 0.4852941176470588,\n \"acc_norm_stderr\": 0.03035969707904612\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.3366013071895425,\n \"acc_stderr\": 0.019117213911495158,\n \"acc_norm\": 0.3366013071895425,\n \"acc_norm_stderr\": 0.019117213911495158\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5224489795918368,\n \"acc_stderr\": 0.031976941187136725,\n \"acc_norm\": 0.5224489795918368,\n \"acc_norm_stderr\": 0.031976941187136725\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5621890547263682,\n \"acc_stderr\": 0.0350808011219984,\n \"acc_norm\": 0.5621890547263682,\n \"acc_norm_stderr\": 0.0350808011219984\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3674698795180723,\n \"acc_stderr\": 0.03753267402120574,\n \"acc_norm\": 0.3674698795180723,\n \"acc_norm_stderr\": 0.03753267402120574\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5497076023391813,\n \"acc_stderr\": 0.038158273659132366,\n \"acc_norm\": 0.5497076023391813,\n \"acc_norm_stderr\": 0.038158273659132366\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.016114124156882455,\n \"mc2\": 0.44825405044907884,\n \"mc2_stderr\": 0.014892271476699756\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6606156274664562,\n \"acc_stderr\": 0.01330771492894175\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06444275966641395,\n 
\"acc_stderr\": 0.006763391728488269\n }\n}\n```", "repo_url": "https://huggingface.co/Charlie911/MultiLora-drop-sharegpt", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|arc:challenge|25_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|arc:challenge|25_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|gsm8k|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|gsm8k|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hellaswag|10_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hellaswag|10_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-09-40.309732.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T13-09-40.309732.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T20-13-52.401722.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T20-13-52.401722.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T20-13-52.401722.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T20-13-52.401722.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T20-13-52.401722.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": 
"2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T13-09-40.309732.parquet"]}, 
{"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["**/details_harness|winogrande|5_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": ["**/details_harness|winogrande|5_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T20-13-52.401722.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T13_09_40.309732", "path": ["results_2024-01-23T13-09-40.309732.parquet"]}, {"split": "2024_01_23T20_13_52.401722", "path": 
["results_2024-01-23T20-13-52.401722.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T20-13-52.401722.parquet"]}]}]}
2024-01-23T20:16:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Charlie911/MultiLora-drop-sharegpt Dataset automatically created during the evaluation run of model Charlie911/MultiLora-drop-sharegpt on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T20:13:52.401722 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
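The loading snippet referenced by "you can for instance do the following" was stripped during processing; below is a minimal reconstruction sketch. The repository name is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, while the config name `harness_winogrande_5` and the `latest` split are taken from this record's metadata.

```python
from datasets import load_dataset

# Repository name assumed from the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_Charlie911__MultiLora-drop-sharegpt",
    "harness_winogrande_5",   # any config name listed in this record's metadata works
    split="latest",           # the "latest" split points to the most recent run
)
print(data[0])
```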
[ "# Dataset Card for Evaluation run of Charlie911/MultiLora-drop-sharegpt\n\n\n\nDataset automatically created during the evaluation run of model Charlie911/MultiLora-drop-sharegpt on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T20:13:52.401722(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Charlie911/MultiLora-drop-sharegpt\n\n\n\nDataset automatically created during the evaluation run of model Charlie911/MultiLora-drop-sharegpt on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T20:13:52.401722(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
298f317a7dce9b1fa0412ba68650ce56553fe25d
# Testing Datasets ### How to use the dataset ```python from datasets import load_dataset # load audio print("loading audio") ds_audio = load_dataset("anotherdev/testing-datasets", data_dir="files/audio") print(ds_audio) # load image print("loading images") ds_image = load_dataset("anotherdev/testing-datasets", data_dir="files/image") print(ds_image) # load text print("loading text") ds_text = load_dataset("anotherdev/testing-datasets", data_dir="files/text") print(ds_text) # load instruct print("loading instruct") ds_instr = load_dataset("anotherdev/testing-datasets", data_dir="files/instruct") print(ds_instr) ```
anotherdev/testing-datasets
[ "task_categories:other", "task_ids:parsing", "size_categories:0<n<1k", "license:other", "other", "region:us" ]
2024-01-23T13:22:54+00:00
{"license": ["other"], "size_categories": ["0<n<1k"], "task_categories": ["other"], "task_ids": ["parsing"], "pretty_name": "testing datasets in a sandbox this is not a real dataset it is sandbox for testing", "tags": ["other"], "dataset_info": [{"config_name": "audio_class", "features": [{"name": "file_path", "dtype": "string"}, {"name": "audio_path", "dtype": "string"}, {"name": "lang", "dtype": "string"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes", "dtype": "binary"}], "splits": [{"name": "audio_class"}]}, {"config_name": "audio_base", "features": [{"name": "file_path", "dtype": "string"}, {"name": "audio_path", "dtype": "string"}, {"name": "lang", "dtype": "string"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes", "dtype": "binary"}], "splits": [{"name": "audio_base"}]}, {"config_name": "audio_import", "features": [{"name": "file_path", "dtype": "string"}, {"name": "audio_path", "dtype": "string"}, {"name": "lang", "dtype": "string"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes", "dtype": "binary"}], "splits": [{"name": "audio_import"}]}, {"config_name": "audio_function", "features": [{"name": "file_path", "dtype": "string"}, {"name": "audio_path", "dtype": "string"}, {"name": "lang", "dtype": "string"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes", "dtype": "binary"}], "splits": [{"name": "audio_function"}]}, {"config_name": "image_base", "features": [{"name": "filename", "dtype": "string"}, {"name": "repo", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "dbytes", "dtype": "binary"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes_mb", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "image_base"}]}, {"config_name": "image_import", "features": [{"name": "filename", "dtype": "string"}, {"name": "repo", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "dbytes", "dtype": "binary"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes_mb", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "image_import"}]}, {"config_name": "image_function", "features": [{"name": "filename", "dtype": "string"}, {"name": "repo", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "dbytes", "dtype": "binary"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes_mb", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "image_function"}]}, {"config_name": "image_class", "features": [{"name": "filename", "dtype": "string"}, {"name": "repo", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "dbytes", "dtype": "binary"}, {"name": "dbytes_len", "dtype": "int64"}, {"name": "dbytes_mb", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "image_class"}]}, {"config_name": "text_instruct", "splits": [{"name": "text_instruct"}]}, {"config_name": "text_python", "splits": [{"name": "text_python_ai_research"}, {"name": "text_python_many_repos"}]}], "configs": [{"config_name": "audio_class", "data_files": [{"split": "audio_class", "path": "files/audio/test-audio-class.parquet"}]}, {"config_name": "audio_base", "data_files": [{"split": "audio_base", "path": "files/audio/test-audio-base.parquet"}]}, {"config_name": "audio_import", "data_files": [{"split": "audio_import", "path": "files/audio/test-audio-import.parquet"}]}, {"config_name": "audio_function", "data_files": [{"split": "audio_function", "path": "files/audio/test-audio-function.parquet"}]}, {"config_name": 
"image_base", "data_files": [{"split": "image_base", "path": "files/image/test-image-base.parquet"}]}, {"config_name": "image_import", "data_files": [{"split": "image_import", "path": "files/image/test-image-import.parquet"}]}, {"config_name": "image-function", "data_files": [{"split": "image_function", "path": "files/image/test-image-function.parquet"}]}, {"config_name": "image-class", "data_files": [{"split": "image_class", "path": "files/image/test-image-class.parquet"}]}, {"config_name": "text_instruct", "data_files": [{"split": "text_instruct", "path": "files/instruct/test-text-instruct.parquet"}]}, {"config_name": "text_python", "data_files": [{"split": "text_python_ai_research", "path": "files/text/test-text-python-ai-research.parquet"}, {"split": "text_python_many_repos", "path": "files/text/test-text-python-many-repos.parquet"}]}]}
2024-01-24T18:04:38+00:00
[]
[]
TAGS #task_categories-other #task_ids-parsing #size_categories-0<n<1k #license-other #other #region-us
# Testing Datasets ### How to use the dataset
[ "# Testing Datasets", "### How to use the dataset" ]
[ "TAGS\n#task_categories-other #task_ids-parsing #size_categories-0<n<1k #license-other #other #region-us \n", "# Testing Datasets", "### How to use the dataset" ]
67fb47b9f386cd178de02af6ba603a002918b736
- **Homepage:** https://www.darrow.ai/ - **Repository:** https://github.com/darrow-labs/LegalLens - **Paper:** https://arxiv.org/pdf/2402.04335.pdf - **Point of Contact:** [Dor Bernsohn](mailto:[email protected]),[Gil Semo](mailto:[email protected]) ## Overview LegalLensNER is a dedicated dataset designed for Named Entity Recognition (NER) in the legal domain, with a specific emphasis on detecting legal violations in unstructured texts. ## Data Fields - id: (int) A unique identifier for each record. - word: (str) The specific word or token in the text. - label: (str) The entity class assigned to the word, one of Law, Violation, Violated By, or Violated On. - start: (int) The starting character index of the word in the text. - end: (int) The ending character index of the word in the text. ## Data Generation The LegalLensNER dataset was generated through a detailed process involving automated data generation with GPT-4 for synthetic data production and manual reviews by experienced legal annotators. ## Collaborations and Contributions The LegalLensNER dataset stands out as a specialized resource for NER tasks within the legal domain, providing an extensive foundation for legal text analysis, information extraction, and fostering the progression of legal NLP research and applications. LegalLensNER is open for further enrichment through contributions and collaborations. Researchers and practitioners with an interest in legal NLP are encouraged to contribute or engage in collaborative projects to enhance the dataset's breadth and depth. ## Curation Rationale The dataset was curated by Darrow.ai (2023). ## Data Instances To access the dataset, the following code snippet can be used: ```python from datasets import load_dataset dataset = load_dataset("darrow-ai/LegalLensNER") ``` ### Citation Information *TBD *LegalLens: Leveraging LLMs for Legal Violation Identification in Unstructured Text* *Proceedings of the 2024 European Chapter of the Association for Computational Linguistics. Malta. 2024* ``` @InProceedings TBD ```
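As an illustrative sketch only, the fields listed under Data Fields can be inspected after loading; the split name `train` is an assumption and may differ in the actual repository.

```python
from datasets import load_dataset

# Split name "train" is assumed; adjust to whatever splits the repository actually exposes.
ds = load_dataset("darrow-ai/LegalLensNER", split="train")

record = ds[0]
# Each record pairs a token with its entity label and character offsets, as described above.
print(record["id"], record["word"], record["label"], record["start"], record["end"])
```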
darrow-ai/LegalLensNER
[ "license:apache-2.0", "arxiv:2402.04335", "region:us" ]
2024-01-23T13:24:12+00:00
{"license": "apache-2.0"}
2024-02-15T07:11:54+00:00
[ "2402.04335" ]
[]
TAGS #license-apache-2.0 #arxiv-2402.04335 #region-us
- Homepage: URL - Repository: URL - Paper: URL - Point of Contact: Dor Bernsohn,Gil Semo ## Overview LegalLensNER is a dedicated dataset designed for Named Entity Recognition (NER) in the legal domain, with a specific emphasis on detecting legal violations in unstructured texts. ## Data Fields - id: (int) A unique identifier for each record. - word: (str) The specific word or token in the text. - label: (str) The entity class assigned to the word, one of Law, Violation, Violated By, or Violated On. - start: (int) The starting character index of the word in the text. - end: (int) The ending character index of the word in the text. ## Data Generation The LegalLensNER dataset was generated through a detailed process involving automated data generation with GPT-4 for synthetic data production and manual reviews by experienced legal annotators. ## Collaborations and Contributions The LegalLensNER dataset stands out as a specialized resource for NER tasks within the legal domain, providing an extensive foundation for legal text analysis, information extraction, and fostering the progression of legal NLP research and applications. LegalLensNER is open for further enrichment through contributions and collaborations. Researchers and practitioners with an interest in legal NLP are encouraged to contribute or engage in collaborative projects to enhance the dataset's breadth and depth. ## Curation Rationale The dataset was curated by URL (2023). ## Data Instances To access the dataset, the following code snippet can be used: *TBD *LegalLens: Leveraging LLMs for Legal Violation Identification in Unstructured Text* *Proceedings of the 2024 European Chapter of the Association for Computational Linguistics. Malta. 2024*
[ "## Overview\nLegalLensNER is a dedicated dataset designed for Named Entity Recognition (NER) in the legal domain, with a specific emphasis on detecting legal violations in unstructured texts.", "## Data Fields\n\n- id: (int) A unique identifier for each record.\n- word: (str) The specific word or token in the text.\n- label: (str) The entity class assigned to the word, one of Law, Violation, Violated By, or Violated On.\n- start: (int) The starting character index of the word in the text.\n- end: (int) The ending character index of the word in the text.", "## Data Generation\nThe LegalLensNER dataset was generated through a detailed process involving automated data generation with GPT-4 for synthetic data production and manual reviews by experienced legal annotators.", "## Collaborations and Contributions\nThe LegalLensNER dataset stands out as a specialized resource for NER tasks within the legal domain, providing an extensive foundation for legal text analysis, information extraction, and fostering the progression of legal NLP research and applications.\nLegalLensNER is open for further enrichment through contributions and collaborations. Researchers and practitioners with an interest in legal NLP are encouraged to contribute or engage in collaborative projects to enhance the dataset's breadth and depth.", "## Curation Rationale\nThe dataset was curated by URL (2023).", "## Data Instances\nTo access the dataset, the following code snippet can be used:\n\n\n\n\n*TBD \n*LegalLens: Leveraging LLMs for Legal Violation Identification in Unstructured Text*\n*Proceedings of the 2024 European Chapter of the Association for Computational Linguistics. Malta. 2024*" ]
[ "TAGS\n#license-apache-2.0 #arxiv-2402.04335 #region-us \n", "## Overview\nLegalLensNER is a dedicated dataset designed for Named Entity Recognition (NER) in the legal domain, with a specific emphasis on detecting legal violations in unstructured texts.", "## Data Fields\n\n- id: (int) A unique identifier for each record.\n- word: (str) The specific word or token in the text.\n- label: (str) The entity class assigned to the word, one of Law, Violation, Violated By, or Violated On.\n- start: (int) The starting character index of the word in the text.\n- end: (int) The ending character index of the word in the text.", "## Data Generation\nThe LegalLensNER dataset was generated through a detailed process involving automated data generation with GPT-4 for synthetic data production and manual reviews by experienced legal annotators.", "## Collaborations and Contributions\nThe LegalLensNER dataset stands out as a specialized resource for NER tasks within the legal domain, providing an extensive foundation for legal text analysis, information extraction, and fostering the progression of legal NLP research and applications.\nLegalLensNER is open for further enrichment through contributions and collaborations. Researchers and practitioners with an interest in legal NLP are encouraged to contribute or engage in collaborative projects to enhance the dataset's breadth and depth.", "## Curation Rationale\nThe dataset was curated by URL (2023).", "## Data Instances\nTo access the dataset, the following code snippet can be used:\n\n\n\n\n*TBD \n*LegalLens: Leveraging LLMs for Legal Violation Identification in Unstructured Text*\n*Proceedings of the 2024 European Chapter of the Association for Computational Linguistics. Malta. 2024*" ]
0e383204f0d0c59b1ba4e89fff82c20e52e8d004
# CuBERT ETH150 Open Benchmarks This is an unofficial HuggingFace upload of the [CuBERT ETH150 Open Benchmarks](https://github.com/google-research/google-research/tree/master/cubert). This dataset was released along with [Learning and Evaluating Contextual Embedding of Source Code](https://arxiv.org/abs/2001.00059). --- ## Benchmarks and Fine-Tuned Models Here we describe the 6 Python benchmarks we created. All 6 benchmarks were derived from [ETH Py150 Open](https://github.com/google-research-datasets/eth_py150_open). All examples are stored as sharded text files. Each text line corresponds to a separate example encoded as a JSON object. For each dataset, we release separate training/validation/testing splits along the same boundaries that ETH Py150 Open splits its files to the corresponding splits. The fine-tuned models are the checkpoints of each model with the highest validation accuracy. 1. **Function-docstring classification**. Combinations of functions with their correct or incorrect documentation string, used to train a classifier that can tell which pairs go together. The JSON fields are: * `function`: string, the source code of a function as text * `docstring`: string, the documentation string for that function. Note that the string is unquoted. To be able to properly tokenize it with the CuBERT tokenizers, you have to wrap it in quotes first. For example, in Python, use `string_to_tokenize = f'"""{docstring}"""'`. * `label`: string, one of (“Incorrect”, “Correct”), the label of the example. * `info`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function name and, for “Incorrect” examples, the function whose docstring was substituted. 1. **Exception classification**. Combinations of functions where one exception type has been masked, along with a label indicating the masked exception type. The JSON fields are: * `function`: string, the source code of a function as text, in which one exception type has been replaced with the special token “__HOLE__” * `label`: string, one of (`ValueError`, `KeyError`, `AttributeError`, `TypeError`, `OSError`, `IOError`, `ImportError`, `IndexError`, `DoesNotExist`, `KeyboardInterrupt`, `StopIteration`, `AssertionError`, `SystemExit`, `RuntimeError`, `HTTPError`, `UnicodeDecodeError`, `NotImplementedError`, `ValidationError`, `ObjectDoesNotExist`, `NameError`, `None`), the masked exception type. Note that `None` never occurs in the data and will be removed in a future release. * `info`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, and the fully-qualified function name. 1. **Variable-misuse classification**. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with a label indicating if this bug-injection has occurred. The JSON fields are: * `function`: string, the source code of a function as text. * `label`: string, one of (“Correct”, “Variable misuse”) indicating if this is a buggy or bug-free example. * `info`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the variable substitution that has occurred (e.g., “correct_variable” → “incorrect_variable”). 1. 
**Swapped-operand classification**. Combinations of functions where the arguments of one binary operator have been swapped, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are: * `function`: string, the source code of a function as text. * `label`: string, one of (“Correct”, “Swapped operands”) indicating if this is a buggy or bug-free example. * `info`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operand swap has occurred (e.g., “swapped operands of `not in`”). 1. **Wrong-binary-operator classification**. Combinations of functions where one binary operator has been swapped with another, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are: * `function`: string, the source code of a function as text. * `label`: string, one of (“Correct”, “Wrong binary operator”) indicating if this is a buggy or bug-free example. * `info`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operator replacement has occurred (e.g., “`==` -> `!=`”). 1. **Variable-misuse localization and repair**. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with information that can be used to localize and repair the bug, as well as the location of the bug if such a bug exists. The JSON fields are: * `function`: a list of strings, the source code of a function, tokenized with the vocabulary from item b. Note that, unlike other task datasets, this dataset gives a tokenized function, rather than the code as a single string. * `target_mask`: a list of integers (0 or 1). If the integer at some position is 1, then the token at the corresponding position of the function token list is a correct repair for the introduced bug. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. If the example is bug-free, all integers are 0. * `error_location_mask`: a list of integers (0 or 1). If the integer at some position is 1, then there is a variable-misuse bug at the corresponding location of the tokenized function. In a bug-free example, the first integer is 1. There is exactly one integer set to 1 for all examples. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. * `candidate_mask`: a list of integers (0 or 1). If the integer at some position is 1, then the variable starting at that position in the tokenized function is a candidate to consider when repairing a bug. Candidates are all variables defined in the function parameters or via variable declarations in the function. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask, for each candidate. * `provenance`: string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the buggy/repair token positions and variables (e.g., “16/18 `kwargs` → `self`”). 
16 is the position of the introduced error, 18 is the location of the repair. ## Citation ```bibtex @inproceedings{cubert, author = {Aditya Kanade and Petros Maniatis and Gogul Balakrishnan and Kensen Shi}, title = {Learning and evaluating contextual embedding of source code}, booktitle = {Proceedings of the 37th International Conference on Machine Learning, {ICML} 2020, 12-18 July 2020}, series = {Proceedings of Machine Learning Research}, publisher = {{PMLR}}, year = {2020}, } ```
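As a usage sketch (not part of the original release notes), the benchmark configs described above can be loaded from this unofficial upload and their fields inspected as follows. Config, split, and field names are taken from this card's metadata; the `datasets` library is assumed to be installed.

```python
from datasets import load_dataset

# Exception classification: each example is a function with one exception type masked as "__HOLE__".
exc = load_dataset("claudios/cubert_ETHPy150Open", "exception_datasets", split="train")
print(exc[0]["label"])             # masked exception type, e.g. "ValueError"
print(exc[0]["function"][:200])    # start of the function text containing the masked hole

# Variable-misuse localization and repair: tokenized functions plus 0/1 masks.
rep = load_dataset("claudios/cubert_ETHPy150Open", "variable_misuse_repair_datasets", split="dev")
ex = rep[0]
error_pos = ex["error_location_mask"].index(1)                    # exactly one 1 per example
repairs = [i for i, m in enumerate(ex["target_mask"]) if m == 1]  # empty for bug-free examples
print(ex["function"][error_pos], repairs)
```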
claudios/cubert_ETHPy150Open
[ "task_categories:text-classification", "license:apache-2.0", "code", "arxiv:2001.00059", "region:us" ]
2024-01-23T13:24:49+00:00
{"license": "apache-2.0", "task_categories": ["text-classification"], "pretty_name": "CuBERT ETH Py150 Benchmarks", "arxiv": 2001.00059, "dataset_info": [{"config_name": "exception_datasets", "features": [{"name": "function", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "info", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 25423003, "num_examples": 18480}, {"name": "dev", "num_bytes": 2845822, "num_examples": 2088}, {"name": "test", "num_bytes": 14064500, "num_examples": 10348}], "download_size": 16935273, "dataset_size": 42333325}, {"config_name": "function_docstring_datasets", "features": [{"name": "function", "dtype": "string"}, {"name": "docstring", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "info", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 261700491, "num_examples": 340846}, {"name": "dev", "num_bytes": 28498757, "num_examples": 37592}, {"name": "test", "num_bytes": 141660242, "num_examples": 186698}], "download_size": 121724722, "dataset_size": 431859490}, {"config_name": "swapped_operands_datasets", "features": [{"name": "function", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "info", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 271097336, "num_examples": 236246}, {"name": "dev", "num_bytes": 29986397, "num_examples": 26118}, {"name": "test", "num_bytes": 148544957, "num_examples": 130972}], "download_size": 105243573, "dataset_size": 449628690}, {"config_name": "variable_misuse_datasets", "features": [{"name": "function", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "info", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 474283355, "num_examples": 700708}, {"name": "dev", "num_bytes": 50447683, "num_examples": 75478}, {"name": "test", "num_bytes": 251591448, "num_examples": 378440}], "download_size": 231302039, "dataset_size": 776322486}, {"config_name": "variable_misuse_repair_datasets", "features": [{"name": "function", "sequence": "string"}, {"name": "target_mask", "sequence": "int64"}, {"name": "error_location_mask", "sequence": "int64"}, {"name": "candidate_mask", "sequence": "int64"}, {"name": "provenance", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4417505142, "num_examples": 700708}, {"name": "dev", "num_bytes": 469436314, "num_examples": 75478}, {"name": "test", "num_bytes": 2331355329, "num_examples": 378440}], "download_size": 498300512, "dataset_size": 7218296785}, {"config_name": "wrong_binary_operator_datasets", "features": [{"name": "function", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "info", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 439948844, "num_examples": 459400}, {"name": "dev", "num_bytes": 47620848, "num_examples": 49804}, {"name": "test", "num_bytes": 239409450, "num_examples": 251804}], "download_size": 163088211, "dataset_size": 726979142}], "configs": [{"config_name": "exception_datasets", "data_files": [{"split": "train", "path": "exception_datasets/train-*"}, {"split": "dev", "path": "exception_datasets/dev-*"}, {"split": "test", "path": "exception_datasets/test-*"}]}, {"config_name": "function_docstring_datasets", "data_files": [{"split": "train", "path": "function_docstring_datasets/train-*"}, {"split": "dev", "path": "function_docstring_datasets/dev-*"}, {"split": "test", "path": "function_docstring_datasets/test-*"}]}, {"config_name": "swapped_operands_datasets", "data_files": [{"split": "train", "path": 
"swapped_operands_datasets/train-*"}, {"split": "dev", "path": "swapped_operands_datasets/dev-*"}, {"split": "test", "path": "swapped_operands_datasets/test-*"}]}, {"config_name": "variable_misuse_datasets", "data_files": [{"split": "train", "path": "variable_misuse_datasets/train-*"}, {"split": "dev", "path": "variable_misuse_datasets/dev-*"}, {"split": "test", "path": "variable_misuse_datasets/test-*"}]}, {"config_name": "variable_misuse_repair_datasets", "data_files": [{"split": "train", "path": "variable_misuse_repair_datasets/train-*"}, {"split": "dev", "path": "variable_misuse_repair_datasets/dev-*"}, {"split": "test", "path": "variable_misuse_repair_datasets/test-*"}]}, {"config_name": "wrong_binary_operator_datasets", "data_files": [{"split": "train", "path": "wrong_binary_operator_datasets/train-*"}, {"split": "dev", "path": "wrong_binary_operator_datasets/dev-*"}, {"split": "test", "path": "wrong_binary_operator_datasets/test-*"}]}], "tags": ["code"]}
2024-01-23T15:17:21+00:00
[ "2001.00059" ]
[]
TAGS #task_categories-text-classification #license-apache-2.0 #code #arxiv-2001.00059 #region-us
# CuBERT ETH150 Open Benchmarks This is an unofficial HuggingFace upload of the CuBERT ETH150 Open Benchmarks. This dataset was released along with Learning and Evaluating Contextual Embedding of Source Code. --- ## Benchmarks and Fine-Tuned Models Here we describe the 6 Python benchmarks we created. All 6 benchmarks were derived from ETH Py150 Open. All examples are stored as sharded text files. Each text line corresponds to a separate example encoded as a JSON object. For each dataset, we release separate training/validation/testing splits along the same boundaries that ETH Py150 Open splits its files to the corresponding splits. The fine-tuned models are the checkpoints of each model with the highest validation accuracy. 1. Function-docstring classification. Combinations of functions with their correct or incorrect documentation string, used to train a classifier that can tell which pairs go together. The JSON fields are: * 'function': string, the source code of a function as text * 'docstring': string, the documentation string for that function. Note that the string is unquoted. To be able to properly tokenize it with the CuBERT tokenizers, you have to wrap it in quotes first. For example, in Python, use 'string_to_tokenize = f'"""{docstring}"""''. * 'label': string, one of (“Incorrect”, “Correct”), the label of the example. * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function name and, for “Incorrect” examples, the function whose docstring was substituted. 1. Exception classification. Combinations of functions where one exception type has been masked, along with a label indicating the masked exception type. The JSON fields are: * 'function': string, the source code of a function as text, in which one exception type has been replaced with the special token “__HOLE__” * 'label': string, one of ('ValueError', 'KeyError', 'AttributeError', 'TypeError', 'OSError', 'IOError', 'ImportError', 'IndexError', 'DoesNotExist', 'KeyboardInterrupt', 'StopIteration', 'AssertionError', 'SystemExit', 'RuntimeError', 'HTTPError', 'UnicodeDecodeError', 'NotImplementedError', 'ValidationError', 'ObjectDoesNotExist', 'NameError', 'None'), the masked exception type. Note that 'None' never occurs in the data and will be removed in a future release. * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, and the fully-qualified function name. 1. Variable-misuse classification. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with a label indicating if this bug-injection has occurred. The JSON fields are: * 'function': string, the source code of a function as text. * 'label': string, one of (“Correct”, “Variable misuse”) indicating if this is a buggy or bug-free example. * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the variable substitution that has occurred (e.g., “correct_variable” → “incorrect_variable”). 1. Swapped-operand classification. 
Combinations of functions where one use binary operator’s arguments have been swapped, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are: * 'function': string, the source code of a function as text. * 'label': string, one of (“Correct”, “Swapped operands”) indicating if this is a buggy or bug-free example. * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operand swap has occurred (e.g., “swapped operands of 'not in'”). 1. Wrong-binary-operator classification. Combinations of functions where one binary operator has been swapped with another, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are: * 'function': string, the source code of a function as text. * 'label': string, one of (“Correct”, “Wrong binary operator”) indicating if this is a buggy or bug-free example. * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operator replacement has occurred (e.g., “'=='-> '!='”). 1. Variable-misuse localization and repair. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with information that can be used to localize and repair the bug, as well as the location of the bug if such a bug exists. The JSON fields are: * 'function': a list of strings, the source code of a function, tokenized with the vocabulary from item b. Note that, unlike other task datasets, this dataset gives a tokenized function, rather than the code as a single string. * 'target_mask': a list of integers (0 or 1). If the integer at some position is 1, then the token at the corresponding position of the function token list is a correct repair for the introduced bug. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. If the example is bug-free, all integers are 0. * 'error_location_mask': a list of integers (0 or 1). If the integer at some position is 1, then there is a variable-misuse bug at the corresponding location of the tokenized function. In a bug-free example, the first integer is 1. There is exactly one integer set to 1 for all examples. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. * 'candidate_mask': a list of integers (0 or 1). If the integer at some position is 1, then the variable starting at that position in the tokenized function is a candidate to consider when repairing a bug. Candidates are all variables defined in the function parameters or via variable declarations in the function. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask, for each candidate. * 'provenance': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the buggy/repair token positions and variables (e.g., “16/18 'kwargs' → 'self'”). 16 is the position of the introduced error, 18 is the location of the repair.
[ "# CuBERT ETH150 Open Benchmarks\n\nThis is an unofficial HuggingFace upload of the CuBERT ETH150 Open Benchmarks. This dataset was released along with Learning and Evaluating Contextual Embedding of Source Code.\n\n---", "## Benchmarks and Fine-Tuned Models\n\nHere we describe the 6 Python benchmarks we created. All 6 benchmarks were derived from ETH Py150 Open. All examples are stored as sharded text files. Each text line corresponds to a separate example encoded as a JSON object. For each dataset, we release separate training/validation/testing splits along the same boundaries that ETH Py150 Open splits its files to the corresponding splits. The fine-tuned models are the checkpoints of each model with the highest validation accuracy.\n\n1. Function-docstring classification. Combinations of functions with their correct or incorrect documentation string, used to train a classifier that can tell which pairs go together. The JSON fields are:\n * 'function': string, the source code of a function as text\n * 'docstring': string, the documentation string for that function. Note that the string is unquoted. To be able to properly tokenize it with the CuBERT tokenizers, you have to wrap it in quotes first. For example, in Python, use 'string_to_tokenize = f'\"\"\"{docstring}\"\"\"''.\n * 'label': string, one of (“Incorrect”, “Correct”), the label of the example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function name and, for “Incorrect” examples, the function whose docstring was substituted.\n1. Exception classification. Combinations of functions where one exception type has been masked, along with a label indicating the masked exception type. The JSON fields are:\n * 'function': string, the source code of a function as text, in which one exception type has been replaced with the special token “__HOLE__”\n * 'label': string, one of ('ValueError', 'KeyError', 'AttributeError', 'TypeError', 'OSError', 'IOError', 'ImportError', 'IndexError', 'DoesNotExist', 'KeyboardInterrupt', 'StopIteration', 'AssertionError', 'SystemExit', 'RuntimeError', 'HTTPError', 'UnicodeDecodeError', 'NotImplementedError', 'ValidationError', 'ObjectDoesNotExist', 'NameError', 'None'), the masked exception type. Note that 'None' never occurs in the data and will be removed in a future release.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, and the fully-qualified function name.\n1. Variable-misuse classification. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Variable misuse”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the variable substitution that has occurred (e.g., “correct_variable” → “incorrect_variable”).\n1. Swapped-operand classification. 
Combinations of functions where one use binary operator’s arguments have been swapped, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Swapped operands”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operand swap has occurred (e.g., “swapped operands of 'not in'”).\n1. Wrong-binary-operator classification. Combinations of functions where one binary operator has been swapped with another, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Wrong binary operator”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operator replacement has occurred (e.g., “'=='-> '!='”).\n1. Variable-misuse localization and repair. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with information that can be used to localize and repair the bug, as well as the location of the bug if such a bug exists. The JSON fields are:\n * 'function': a list of strings, the source code of a function, tokenized with the vocabulary from item b. Note that, unlike other task datasets, this dataset gives a tokenized function, rather than the code as a single string.\n * 'target_mask': a list of integers (0 or 1). If the integer at some position is 1, then the token at the corresponding position of the function token list is a correct repair for the introduced bug. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. If the example is bug-free, all integers are 0.\n * 'error_location_mask': a list of integers (0 or 1). If the integer at some position is 1, then there is a variable-misuse bug at the corresponding location of the tokenized function. In a bug-free example, the first integer is 1. There is exactly one integer set to 1 for all examples. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask.\n * 'candidate_mask': a list of integers (0 or 1). If the integer at some position is 1, then the variable starting at that position in the tokenized function is a candidate to consider when repairing a bug. Candidates are all variables defined in the function parameters or via variable declarations in the function. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask, for each candidate.\n * 'provenance': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the buggy/repair token positions and variables (e.g., “16/18 'kwargs' → 'self'”). 
16 is the position of the introduced error, 18 is the location of the repair." ]
[ "TAGS\n#task_categories-text-classification #license-apache-2.0 #code #arxiv-2001.00059 #region-us \n", "# CuBERT ETH150 Open Benchmarks\n\nThis is an unofficial HuggingFace upload of the CuBERT ETH150 Open Benchmarks. This dataset was released along with Learning and Evaluating Contextual Embedding of Source Code.\n\n---", "## Benchmarks and Fine-Tuned Models\n\nHere we describe the 6 Python benchmarks we created. All 6 benchmarks were derived from ETH Py150 Open. All examples are stored as sharded text files. Each text line corresponds to a separate example encoded as a JSON object. For each dataset, we release separate training/validation/testing splits along the same boundaries that ETH Py150 Open splits its files to the corresponding splits. The fine-tuned models are the checkpoints of each model with the highest validation accuracy.\n\n1. Function-docstring classification. Combinations of functions with their correct or incorrect documentation string, used to train a classifier that can tell which pairs go together. The JSON fields are:\n * 'function': string, the source code of a function as text\n * 'docstring': string, the documentation string for that function. Note that the string is unquoted. To be able to properly tokenize it with the CuBERT tokenizers, you have to wrap it in quotes first. For example, in Python, use 'string_to_tokenize = f'\"\"\"{docstring}\"\"\"''.\n * 'label': string, one of (“Incorrect”, “Correct”), the label of the example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function name and, for “Incorrect” examples, the function whose docstring was substituted.\n1. Exception classification. Combinations of functions where one exception type has been masked, along with a label indicating the masked exception type. The JSON fields are:\n * 'function': string, the source code of a function as text, in which one exception type has been replaced with the special token “__HOLE__”\n * 'label': string, one of ('ValueError', 'KeyError', 'AttributeError', 'TypeError', 'OSError', 'IOError', 'ImportError', 'IndexError', 'DoesNotExist', 'KeyboardInterrupt', 'StopIteration', 'AssertionError', 'SystemExit', 'RuntimeError', 'HTTPError', 'UnicodeDecodeError', 'NotImplementedError', 'ValidationError', 'ObjectDoesNotExist', 'NameError', 'None'), the masked exception type. Note that 'None' never occurs in the data and will be removed in a future release.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, and the fully-qualified function name.\n1. Variable-misuse classification. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Variable misuse”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the variable substitution that has occurred (e.g., “correct_variable” → “incorrect_variable”).\n1. Swapped-operand classification. 
Combinations of functions where one use binary operator’s arguments have been swapped, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Swapped operands”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operand swap has occurred (e.g., “swapped operands of 'not in'”).\n1. Wrong-binary-operator classification. Combinations of functions where one binary operator has been swapped with another, to create a buggy example, or left undisturbed, along with a label indicating if this bug-injection has occurred. The JSON fields are:\n * 'function': string, the source code of a function as text.\n * 'label': string, one of (“Correct”, “Wrong binary operator”) indicating if this is a buggy or bug-free example.\n * 'info': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the operator replacement has occurred (e.g., “'=='-> '!='”).\n1. Variable-misuse localization and repair. Combinations of functions where one use of a variable may have been replaced with another variable defined in the same context, along with information that can be used to localize and repair the bug, as well as the location of the bug if such a bug exists. The JSON fields are:\n * 'function': a list of strings, the source code of a function, tokenized with the vocabulary from item b. Note that, unlike other task datasets, this dataset gives a tokenized function, rather than the code as a single string.\n * 'target_mask': a list of integers (0 or 1). If the integer at some position is 1, then the token at the corresponding position of the function token list is a correct repair for the introduced bug. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask. If the example is bug-free, all integers are 0.\n * 'error_location_mask': a list of integers (0 or 1). If the integer at some position is 1, then there is a variable-misuse bug at the corresponding location of the tokenized function. In a bug-free example, the first integer is 1. There is exactly one integer set to 1 for all examples. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask.\n * 'candidate_mask': a list of integers (0 or 1). If the integer at some position is 1, then the variable starting at that position in the tokenized function is a candidate to consider when repairing a bug. Candidates are all variables defined in the function parameters or via variable declarations in the function. If a variable has been split into multiple tokens, only the first subtoken is marked in this mask, for each candidate.\n * 'provenance': string, an unformatted description of how the example was constructed, including the source dataset (always “ETHPy150Open”), the repository and filepath, the function, and whether the example is bugfree (marked “original”) or the buggy/repair token positions and variables (e.g., “16/18 'kwargs' → 'self'”). 
16 is the position of the introduced error, 18 is the location of the repair." ]
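The benchmark descriptions above spell out the per-line JSON schema for each task, including the note that docstrings are stored unquoted and should be wrapped in quotes before tokenization. The sketch below is a hypothetical reader for one shard of the function-docstring benchmark; the shard path and the downstream use are assumptions for illustration, not part of the original release.

```python
import json

def read_function_docstring_examples(path):
    """Yield (function, quoted_docstring, label) triples from one JSONL shard.

    Each text line is a JSON object with 'function', 'docstring', 'label',
    and 'info' fields, as described in the benchmark card above.
    """
    with open(path, "r", encoding="utf-8") as fh:
        for line in fh:
            example = json.loads(line)
            # The docstring is stored unquoted; wrap it in triple quotes
            # before tokenizing, as the card recommends.
            quoted_docstring = f'"""{example["docstring"]}"""'
            yield example["function"], quoted_docstring, example["label"]

# Hypothetical usage; the shard name is an assumption.
for function, docstring, label in read_function_docstring_examples(
    "function_docstring/train/shard-00000.jsontxt"
):
    pass  # feed (function, docstring) and the Correct/Incorrect label to a classifier
```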
3a59ef70860965b73728cb0fb21f41dd3e5cdb82
# Dataset Card for "kmb-ai/vaia-ic" ## Dataset Description - **Repository:** [vaia-ner](https://huggingface.co/datasets/kmb-ai/vaia-ic) - **Dataset:** vaia-ic - **Domain:** Hotel Q&A - **Number of Classes:** 7 ### Dataset Summary vaia-ic is an intent classification dataset created as part of Vaia project. ``` Classes: list -1 information 0 cuisine 1 directions 2 reservations 3 open_hours 4 rating 5. ``` ## Dataset Structure ### Data Instances Examples of `train` looks as follows. ``` -1, Where can I get a classic New York bagel with cream cheese? 0, Provide details about the newly opened eatery called Palate Paradise. 1, Where can I find a restaurant with a unique fusion of Caribbean and Asian cuisines? 2, How can I get to the seafood bistro Oceanic Oasis from the central train station? 3, I want to book a table for five at Flame Grill for this Sunday. 4, Is the cafe Morning Glory open for breakfast during the weekdays? 5, Suggest a place known for its exceptional service and a menu that caters to vegans. ```
kmb-ai/vaia-dataset-ic
[ "task_categories:text-classification", "task_ids:intent-classification", "multilinguality:monolingual", "size_categories:1K<n<10K", "language:en", "license:other", "region:us" ]
2024-01-23T13:54:22+00:00
{"language": ["en"], "license": ["other"], "multilinguality": ["monolingual"], "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "task_ids": ["intent-classification"], "pretty_name": "Vaia Intent Classification Dataset"}
2024-01-27T07:42:16+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #task_ids-intent-classification #multilinguality-monolingual #size_categories-1K<n<10K #language-English #license-other #region-us
# Dataset Card for "kmb-ai/vaia-ic" ## Dataset Description - Repository: vaia-ner - Dataset: vaia-ic - Domain: Hotel Q&A - Number of Classes: 7 ### Dataset Summary vaia-ic is an intent classification dataset created as part of Vaia project. ## Dataset Structure ### Data Instances Examples of 'train' looks as follows.
[ "# Dataset Card for \"kmb-ai/vaia-ic\"", "## Dataset Description\n\n- Repository: vaia-ner\n- Dataset: vaia-ic\n- Domain: Hotel Q&A\n- Number of Classes: 7", "### Dataset Summary\nvaia-ic is an intent classification dataset created as part of Vaia project.", "## Dataset Structure", "### Data Instances\nExamples of 'train' looks as follows." ]
[ "TAGS\n#task_categories-text-classification #task_ids-intent-classification #multilinguality-monolingual #size_categories-1K<n<10K #language-English #license-other #region-us \n", "# Dataset Card for \"kmb-ai/vaia-ic\"", "## Dataset Description\n\n- Repository: vaia-ner\n- Dataset: vaia-ic\n- Domain: Hotel Q&A\n- Number of Classes: 7", "### Dataset Summary\nvaia-ic is an intent classification dataset created as part of Vaia project.", "## Dataset Structure", "### Data Instances\nExamples of 'train' looks as follows." ]
ce210924f622398e0347d4a9cc62e7ce52052eee
# Dataset Card for "Trial7" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Dstycoon/Trial7
[ "region:us" ]
2024-01-23T13:55:39+00:00
{"dataset_info": {"features": [{"name": "data", "dtype": "string"}, {"name": "conversation", "dtype": "string"}, {"name": "predicted_disease", "dtype": "string"}, {"name": "rationale", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 72976, "num_examples": 10}], "download_size": 0, "dataset_size": 72976}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-23T14:02:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Trial7" More Information needed
[ "# Dataset Card for \"Trial7\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Trial7\"\n\nMore Information needed" ]
23573c171edd0d05ea285f819ce25677d6f07691
# Dataset Card for "10-dim" ### Map labels to strings ```python # Here's the list of labels and mappings between id and label. labels = [ "social_support", "conflict", "trust", "fun", "similarity", "identity", "respect", "romance", "knowledge", "power", ] id2label = {i: label for i, label in enumerate(labels)} label2id = {label: i for i, label in enumerate(labels)} # Given an examples, this is how you map sample = { "text": "This is just a made up text" "labels": [0, 0, 0, 1, 0, 0, 0, 0, 0, 1] } labels_str = [id2label[i] for i, label in enumerate(sample['labels']) if label == 1] ``` [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
AndersGiovanni/10-dim
[ "task_categories:text-classification", "size_categories:1K<n<10K", "language:en", "license:mit", "region:us" ]
2024-01-23T13:57:01+00:00
{"language": ["en"], "license": "mit", "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "pretty_name": "10 Social Dimensions", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 2237355.6300445576, "num_examples": 5498}, {"name": "validation", "num_bytes": 479375.2150222788, "num_examples": 1178}, {"name": "test", "num_bytes": 479782.1549331636, "num_examples": 1179}], "download_size": 1723668, "dataset_size": 3196513.0}}
2024-01-25T12:58:54+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #size_categories-1K<n<10K #language-English #license-mit #region-us
# Dataset Card for "10-dim" ### Map labels to strings More Information needed
[ "# Dataset Card for \"10-dim\"", "### Map labels to strings\n\n\n\nMore Information needed" ]
[ "TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-English #license-mit #region-us \n", "# Dataset Card for \"10-dim\"", "### Map labels to strings\n\n\n\nMore Information needed" ]
6b9ad1876755c52afbf50f77c1e367f9b0502347
This is not my dataset; I found it (but don't remember the source 😅) --- task_categories: - text-generation language: - en tags: - sql - code ---
adi-kmt/sql-dataset
[ "region:us" ]
2024-01-23T13:58:15+00:00
{}
2024-01-23T14:00:30+00:00
[]
[]
TAGS #region-us
This is not my dataset; I found it (but don't remember the source) --- task_categories: - text-generation language: - en tags: - sql - code ---
[]
[ "TAGS\n#region-us \n" ]
3b1671dff5551738111f82bf4622e84e294ac459
# Dataset Card for "trial8" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Dstycoon/trial8
[ "region:us" ]
2024-01-23T14:08:21+00:00
{"dataset_info": {"features": [{"name": "data", "dtype": "string"}, {"name": "conversation", "dtype": "string"}, {"name": "predicted_disease", "dtype": "string"}, {"name": "rationale", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 56747, "num_examples": 10}], "download_size": 61186, "dataset_size": 56747}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-23T14:12:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for "trial8" More Information needed
[ "# Dataset Card for \"trial8\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"trial8\"\n\nMore Information needed" ]
c1f924f78be4fdf5cbb84d224a1e9995040aa449
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
tuantucyd/testing_vid_img
[ "region:us" ]
2024-01-23T14:23:31+00:00
{}
2024-01-30T15:56:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
8135b38ec2903ba970efaccf0788a331d567c63c
# Dataset Card for "testonly" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
happydale/testonly
[ "region:us" ]
2024-01-23T14:43:12+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 26390405, "num_examples": 51325}, {"name": "val", "num_bytes": 1777077, "num_examples": 3500}], "download_size": 5652485, "dataset_size": 28167482}}
2024-01-23T14:48:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "testonly" More Information needed
[ "# Dataset Card for \"testonly\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"testonly\"\n\nMore Information needed" ]
543380e8431729d49d89bdb58f5d27507d84680b
# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss](https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T14:48:42.032735](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss/blob/main/results_2024-01-23T14-48-42.032735.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23185759227721037, "acc_stderr": 0.029943351088273156, "acc_norm": 0.23198078471559838, "acc_norm_stderr": 0.03073349021074415, "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.49027891527401446, "mc2_stderr": 0.016136979087813173 }, "harness|arc:challenge|25": { "acc": 0.19368600682593856, "acc_stderr": 0.01154842540997854, "acc_norm": 0.23293515358361774, "acc_norm_stderr": 0.012352507042617401 }, "harness|hellaswag|10": { "acc": 0.2597092212706632, "acc_stderr": 0.004375788991216851, "acc_norm": 0.2557259510057757, "acc_norm_stderr": 0.004353768730644558 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 
0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.1477832512315271, "acc_stderr": 0.02496962133352127, "acc_norm": 0.1477832512315271, "acc_norm_stderr": 0.02496962133352127 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139404, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139404 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20512820512820512, "acc_stderr": 0.02047323317355198, "acc_norm": 0.20512820512820512, "acc_norm_stderr": 0.02047323317355198 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.026265024608275882, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 
0.026265024608275882 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1981651376146789, "acc_stderr": 0.017090573804217888, "acc_norm": 0.1981651376146789, "acc_norm_stderr": 0.017090573804217888 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693268, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693268 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.23140495867768596, "acc_stderr": 0.03849856098794089, "acc_norm": 0.23140495867768596, "acc_norm_stderr": 0.03849856098794089 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2692307692307692, "acc_stderr": 0.029058588303748842, "acc_norm": 0.2692307692307692, "acc_norm_stderr": 0.029058588303748842 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.02335736578587404, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.02335736578587404 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.19292604501607716, "acc_stderr": 0.022411516780911366, "acc_norm": 0.19292604501607716, "acc_norm_stderr": 0.022411516780911366 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 
0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.49027891527401446, "mc2_stderr": 0.016136979087813173 }, "harness|winogrande|5": { "acc": 0.49171270718232046, "acc_stderr": 0.014050555322824192 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss
[ "region:us" ]
2024-01-23T14:50:23+00:00
{"pretty_name": "Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss", "dataset_summary": "Dataset automatically created during the evaluation run of model [xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss](https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T14:48:42.032735](https://huggingface.co/datasets/open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss/blob/main/results_2024-01-23T14-48-42.032735.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23185759227721037,\n \"acc_stderr\": 0.029943351088273156,\n \"acc_norm\": 0.23198078471559838,\n \"acc_norm_stderr\": 0.03073349021074415,\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.49027891527401446,\n \"mc2_stderr\": 0.016136979087813173\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.19368600682593856,\n \"acc_stderr\": 0.01154842540997854,\n \"acc_norm\": 0.23293515358361774,\n \"acc_norm_stderr\": 0.012352507042617401\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2597092212706632,\n \"acc_stderr\": 0.004375788991216851,\n \"acc_norm\": 0.2557259510057757,\n \"acc_norm_stderr\": 0.004353768730644558\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 
0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.1477832512315271,\n \"acc_stderr\": 0.02496962133352127,\n \"acc_norm\": 0.1477832512315271,\n \"acc_norm_stderr\": 0.02496962133352127\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24848484848484848,\n \"acc_stderr\": 0.03374402644139404,\n \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.03374402644139404\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n 
\"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20512820512820512,\n \"acc_stderr\": 0.02047323317355198,\n \"acc_norm\": 0.20512820512820512,\n \"acc_norm_stderr\": 0.02047323317355198\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.026265024608275882,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.026265024608275882\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1981651376146789,\n \"acc_stderr\": 0.017090573804217888,\n \"acc_norm\": 0.1981651376146789,\n \"acc_norm_stderr\": 0.017090573804217888\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.030778554678693268,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.030778554678693268\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.23140495867768596,\n \"acc_stderr\": 0.03849856098794089,\n \"acc_norm\": 0.23140495867768596,\n \"acc_norm_stderr\": 0.03849856098794089\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2692307692307692,\n \"acc_stderr\": 0.029058588303748842,\n \"acc_norm\": 0.2692307692307692,\n \"acc_norm_stderr\": 0.029058588303748842\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2514450867052023,\n \"acc_stderr\": 0.02335736578587404,\n \"acc_norm\": 0.2514450867052023,\n \"acc_norm_stderr\": 0.02335736578587404\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.19292604501607716,\n \"acc_stderr\": 0.022411516780911366,\n \"acc_norm\": 0.19292604501607716,\n \"acc_norm_stderr\": 0.022411516780911366\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.49027891527401446,\n \"mc2_stderr\": 0.016136979087813173\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.49171270718232046,\n \"acc_stderr\": 0.014050555322824192\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.032735.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.032735.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.032735.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.032735.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.032735.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["**/details_harness|winogrande|5_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-23T14-48-42.032735.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T14_48_42.032735", "path": ["results_2024-01-23T14-48-42.032735.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T14-48-42.032735.parquet"]}]}]}
2024-01-23T14:50:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss Dataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T14:48:42.032735 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
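The loading snippet that the card refers to ("you can for instance do the following") is dropped from this flattened rendering. A minimal sketch of what it would look like is given below; the repository id is an assumption inferred from the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern of these evaluation runs, and `harness_winogrande_5` is simply one of the configs listed in this row's metadata.

```python
# Minimal sketch: load one config of the evaluation-details dataset.
# The repository id below is assumed from the naming pattern of other rows in
# this dump, not stated explicitly here; "harness_winogrande_5" is one of the
# configs listed in this row's metadata.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_xformAI__facebook-opt-125m-qcqa-ub-6-best-for-q-loss",
    "harness_winogrande_5",
    split="train",
)
```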
[ "# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss\n\n\n\nDataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:48:42.032735(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss\n\n\n\nDataset automatically created during the evaluation run of model xformAI/facebook-opt-125m-qcqa-ub-6-best-for-q-loss on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:48:42.032735(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
a7633742a5398ad50cb0fcbf2a250bc2afd11572
# Dataset Card for Evaluation run of Cartinoe5930/TIES-Merging <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Cartinoe5930/TIES-Merging](https://huggingface.co/Cartinoe5930/TIES-Merging) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Cartinoe5930__TIES-Merging", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T14:48:42.091163](https://huggingface.co/datasets/open-llm-leaderboard/details_Cartinoe5930__TIES-Merging/blob/main/results_2024-01-23T14-48-42.091163.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.51453863896115, "acc_stderr": 0.034362486729297426, "acc_norm": 0.520951562938696, "acc_norm_stderr": 0.03511500331514224, "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775516, "mc2": 0.41245749372615664, "mc2_stderr": 0.014877902993478492 }, "harness|arc:challenge|25": { "acc": 0.5247440273037542, "acc_stderr": 0.014593487694937738, "acc_norm": 0.5810580204778157, "acc_norm_stderr": 0.014418106953639015 }, "harness|hellaswag|10": { "acc": 0.5466042620991834, "acc_stderr": 0.0049680589444721585, "acc_norm": 0.7574188408683529, "acc_norm_stderr": 0.004277678115910419 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.043192236258113303, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.043192236258113303 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5811320754716981, "acc_stderr": 0.030365050829115205, "acc_norm": 0.5811320754716981, "acc_norm_stderr": 0.030365050829115205 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5972222222222222, "acc_stderr": 0.04101405519842426, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.04101405519842426 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.03811890988940412, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.03811890988940412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.04655010411319616, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.04655010411319616 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4723404255319149, "acc_stderr": 0.03263597118409769, "acc_norm": 0.4723404255319149, "acc_norm_stderr": 0.03263597118409769 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.041665675771015785, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3862433862433862, "acc_stderr": 0.02507598176760168, "acc_norm": 0.3862433862433862, "acc_norm_stderr": 0.02507598176760168 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6096774193548387, "acc_stderr": 0.027751256636969576, "acc_norm": 0.6096774193548387, "acc_norm_stderr": 0.027751256636969576 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3842364532019704, "acc_stderr": 0.034223985656575494, "acc_norm": 0.3842364532019704, "acc_norm_stderr": 0.034223985656575494 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0368105086916155, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0368105086916155 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6616161616161617, "acc_stderr": 0.033711241426263014, "acc_norm": 0.6616161616161617, "acc_norm_stderr": 0.033711241426263014 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7979274611398963, "acc_stderr": 0.02897908979429673, "acc_norm": 0.7979274611398963, "acc_norm_stderr": 0.02897908979429673 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5538461538461539, "acc_stderr": 0.02520357177302833, "acc_norm": 0.5538461538461539, "acc_norm_stderr": 0.02520357177302833 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833713, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833713 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4957983193277311, "acc_stderr": 0.03247734334448111, "acc_norm": 0.4957983193277311, "acc_norm_stderr": 0.03247734334448111 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6770642201834862, "acc_stderr": 0.02004811592341531, "acc_norm": 0.6770642201834862, "acc_norm_stderr": 0.02004811592341531 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.032365852526021574, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.032365852526021574 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.35294117647058826, "acc_stderr": 0.03354092437591521, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.03354092437591521 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5358649789029536, "acc_stderr": 0.03246338898055659, "acc_norm": 0.5358649789029536, "acc_norm_stderr": 0.03246338898055659 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.039849796533028725, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.039849796533028725 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04668408033024931, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04668408033024931 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.588957055214724, "acc_stderr": 0.038656978537853624, "acc_norm": 0.588957055214724, "acc_norm_stderr": 0.038656978537853624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7692307692307693, "acc_stderr": 0.027601921381417604, "acc_norm": 0.7692307692307693, "acc_norm_stderr": 0.027601921381417604 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6998722860791826, "acc_stderr": 0.016389249691317425, "acc_norm": 0.6998722860791826, "acc_norm_stderr": 0.016389249691317425 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6127167630057804, "acc_stderr": 0.026226158605124658, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.026226158605124658 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36201117318435755, "acc_stderr": 0.01607306735015309, "acc_norm": 0.36201117318435755, "acc_norm_stderr": 0.01607306735015309 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5555555555555556, "acc_stderr": 0.028452639985088013, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.028452639985088013 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5691318327974276, "acc_stderr": 0.02812534098397271, "acc_norm": 0.5691318327974276, "acc_norm_stderr": 0.02812534098397271 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5617283950617284, "acc_stderr": 0.027607914087400473, "acc_norm": 0.5617283950617284, "acc_norm_stderr": 0.027607914087400473 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.40425531914893614, "acc_stderr": 0.02927553215970472, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.02927553215970472 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3200782268578879, "acc_stderr": 0.011914791947638533, "acc_norm": 0.3200782268578879, "acc_norm_stderr": 0.011914791947638533 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4522058823529412, "acc_stderr": 0.030233758551596452, "acc_norm": 0.4522058823529412, "acc_norm_stderr": 0.030233758551596452 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5212418300653595, "acc_stderr": 0.020209572388600248, "acc_norm": 0.5212418300653595, "acc_norm_stderr": 0.020209572388600248 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.04582004841505416, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.04582004841505416 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6122448979591837, "acc_stderr": 0.03119223072679566, "acc_norm": 0.6122448979591837, "acc_norm_stderr": 0.03119223072679566 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6965174129353234, "acc_stderr": 0.03251006816458618, "acc_norm": 0.6965174129353234, "acc_norm_stderr": 0.03251006816458618 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7192982456140351, "acc_stderr": 0.034462962170884265, "acc_norm": 0.7192982456140351, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775516, "mc2": 0.41245749372615664, "mc2_stderr": 0.014877902993478492 }, "harness|winogrande|5": { "acc": 0.7237569060773481, "acc_stderr": 0.01256681501569816 }, "harness|gsm8k|5": { "acc": 0.19408642911296436, "acc_stderr": 0.010893918308192424 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Cartinoe5930__TIES-Merging
[ "region:us" ]
2024-01-23T14:51:04+00:00
{"pretty_name": "Evaluation run of Cartinoe5930/TIES-Merging", "dataset_summary": "Dataset automatically created during the evaluation run of model [Cartinoe5930/TIES-Merging](https://huggingface.co/Cartinoe5930/TIES-Merging) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Cartinoe5930__TIES-Merging\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T14:48:42.091163](https://huggingface.co/datasets/open-llm-leaderboard/details_Cartinoe5930__TIES-Merging/blob/main/results_2024-01-23T14-48-42.091163.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.51453863896115,\n \"acc_stderr\": 0.034362486729297426,\n \"acc_norm\": 0.520951562938696,\n \"acc_norm_stderr\": 0.03511500331514224,\n \"mc1\": 0.27539779681762544,\n \"mc1_stderr\": 0.015638135667775516,\n \"mc2\": 0.41245749372615664,\n \"mc2_stderr\": 0.014877902993478492\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5247440273037542,\n \"acc_stderr\": 0.014593487694937738,\n \"acc_norm\": 0.5810580204778157,\n \"acc_norm_stderr\": 0.014418106953639015\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5466042620991834,\n \"acc_stderr\": 0.0049680589444721585,\n \"acc_norm\": 0.7574188408683529,\n \"acc_norm_stderr\": 0.004277678115910419\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n \"acc_stderr\": 0.043192236258113303,\n \"acc_norm\": 0.5037037037037037,\n \"acc_norm_stderr\": 0.043192236258113303\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5811320754716981,\n \"acc_stderr\": 0.030365050829115205,\n \"acc_norm\": 0.5811320754716981,\n \"acc_norm_stderr\": 0.030365050829115205\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n \"acc_stderr\": 0.04101405519842426,\n \"acc_norm\": 0.5972222222222222,\n \"acc_norm_stderr\": 0.04101405519842426\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 
0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n \"acc_stderr\": 0.03811890988940412,\n \"acc_norm\": 0.5086705202312138,\n \"acc_norm_stderr\": 0.03811890988940412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.04655010411319616,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.04655010411319616\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4723404255319149,\n \"acc_stderr\": 0.03263597118409769,\n \"acc_norm\": 0.4723404255319149,\n \"acc_norm_stderr\": 0.03263597118409769\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.041665675771015785,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.041665675771015785\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3862433862433862,\n \"acc_stderr\": 0.02507598176760168,\n \"acc_norm\": 0.3862433862433862,\n \"acc_norm_stderr\": 0.02507598176760168\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6096774193548387,\n \"acc_stderr\": 0.027751256636969576,\n \"acc_norm\": 0.6096774193548387,\n \"acc_norm_stderr\": 0.027751256636969576\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3842364532019704,\n \"acc_stderr\": 0.034223985656575494,\n \"acc_norm\": 0.3842364532019704,\n \"acc_norm_stderr\": 0.034223985656575494\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.0368105086916155,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.0368105086916155\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6616161616161617,\n \"acc_stderr\": 0.033711241426263014,\n \"acc_norm\": 0.6616161616161617,\n \"acc_norm_stderr\": 0.033711241426263014\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7979274611398963,\n \"acc_stderr\": 0.02897908979429673,\n \"acc_norm\": 0.7979274611398963,\n \"acc_norm_stderr\": 0.02897908979429673\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5538461538461539,\n \"acc_stderr\": 0.02520357177302833,\n \"acc_norm\": 0.5538461538461539,\n \"acc_norm_stderr\": 0.02520357177302833\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833713,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833713\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4957983193277311,\n \"acc_stderr\": 0.03247734334448111,\n \"acc_norm\": 0.4957983193277311,\n \"acc_norm_stderr\": 0.03247734334448111\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6770642201834862,\n \"acc_stderr\": 0.02004811592341531,\n \"acc_norm\": 0.6770642201834862,\n \"acc_norm_stderr\": 0.02004811592341531\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3425925925925926,\n \"acc_stderr\": 0.032365852526021574,\n \"acc_norm\": 0.3425925925925926,\n \"acc_norm_stderr\": 0.032365852526021574\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.03354092437591521,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.03354092437591521\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5358649789029536,\n \"acc_stderr\": 0.03246338898055659,\n \"acc_norm\": 0.5358649789029536,\n \"acc_norm_stderr\": 0.03246338898055659\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.039849796533028725,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.039849796533028725\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.04668408033024931,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.04668408033024931\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.588957055214724,\n \"acc_stderr\": 0.038656978537853624,\n \"acc_norm\": 0.588957055214724,\n \"acc_norm_stderr\": 0.038656978537853624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6310679611650486,\n \"acc_stderr\": 0.0477761518115674,\n \"acc_norm\": 0.6310679611650486,\n \"acc_norm_stderr\": 0.0477761518115674\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7692307692307693,\n \"acc_stderr\": 0.027601921381417604,\n \"acc_norm\": 0.7692307692307693,\n \"acc_norm_stderr\": 0.027601921381417604\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6998722860791826,\n \"acc_stderr\": 0.016389249691317425,\n \"acc_norm\": 0.6998722860791826,\n \"acc_norm_stderr\": 0.016389249691317425\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.026226158605124658,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.026226158605124658\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36201117318435755,\n \"acc_stderr\": 0.01607306735015309,\n \"acc_norm\": 0.36201117318435755,\n \"acc_norm_stderr\": 0.01607306735015309\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.028452639985088013,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.028452639985088013\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5691318327974276,\n \"acc_stderr\": 0.02812534098397271,\n \"acc_norm\": 0.5691318327974276,\n \"acc_norm_stderr\": 0.02812534098397271\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5617283950617284,\n \"acc_stderr\": 0.027607914087400473,\n \"acc_norm\": 0.5617283950617284,\n \"acc_norm_stderr\": 0.027607914087400473\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40425531914893614,\n \"acc_stderr\": 0.02927553215970472,\n \"acc_norm\": 0.40425531914893614,\n \"acc_norm_stderr\": 0.02927553215970472\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3200782268578879,\n \"acc_stderr\": 0.011914791947638533,\n \"acc_norm\": 0.3200782268578879,\n \"acc_norm_stderr\": 0.011914791947638533\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4522058823529412,\n \"acc_stderr\": 0.030233758551596452,\n \"acc_norm\": 0.4522058823529412,\n \"acc_norm_stderr\": 0.030233758551596452\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5212418300653595,\n \"acc_stderr\": 0.020209572388600248,\n \"acc_norm\": 0.5212418300653595,\n \"acc_norm_stderr\": 0.020209572388600248\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.04582004841505416,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.04582004841505416\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.03119223072679566,\n \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.03119223072679566\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6965174129353234,\n \"acc_stderr\": 0.03251006816458618,\n \"acc_norm\": 0.6965174129353234,\n \"acc_norm_stderr\": 0.03251006816458618\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4397590361445783,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.4397590361445783,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7192982456140351,\n \"acc_stderr\": 0.034462962170884265,\n \"acc_norm\": 0.7192982456140351,\n \"acc_norm_stderr\": 0.034462962170884265\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27539779681762544,\n \"mc1_stderr\": 0.015638135667775516,\n \"mc2\": 0.41245749372615664,\n \"mc2_stderr\": 0.014877902993478492\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7237569060773481,\n \"acc_stderr\": 0.01256681501569816\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.19408642911296436,\n \"acc_stderr\": 
0.010893918308192424\n }\n}\n```", "repo_url": "https://huggingface.co/Cartinoe5930/TIES-Merging", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.091163.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.091163.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.091163.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.091163.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.091163.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T14_48_42.091163", "path": ["**/details_harness|winogrande|5_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T14-48-42.091163.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_23T14_48_42.091163", "path": ["results_2024-01-23T14-48-42.091163.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T14-48-42.091163.parquet"]}]}]}
2024-01-23T14:51:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Cartinoe5930/TIES-Merging Dataset automatically created during the evaluation run of model Cartinoe5930/TIES-Merging on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T14:48:42.091163 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
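For instance, assuming the details repository follows the leaderboard's usual naming pattern (open-llm-leaderboard/details_Cartinoe5930__TIES-Merging), one of the configurations listed in the metadata above (harness_winogrande_5) could be loaded with a minimal sketch like this:

```python
from datasets import load_dataset

# Minimal sketch: the repository id assumes the leaderboard's usual
# details_<org>__<model> naming pattern; the config name is taken from
# the metadata listed above.
data = load_dataset(
    "open-llm-leaderboard/details_Cartinoe5930__TIES-Merging",
    "harness_winogrande_5",
    split="train",
)
```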
[ "# Dataset Card for Evaluation run of Cartinoe5930/TIES-Merging\n\n\n\nDataset automatically created during the evaluation run of model Cartinoe5930/TIES-Merging on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:48:42.091163(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Cartinoe5930/TIES-Merging\n\n\n\nDataset automatically created during the evaluation run of model Cartinoe5930/TIES-Merging on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:48:42.091163(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
a15242c42c03d1035b3373d633b7e8d7b0055030
# Dataset Card for "multishort_id_rename_filtered" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CJWeiss/multishort_id_rename_filtered
[ "region:us" ]
2024-01-23T14:56:18+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1235418985.444964, "num_examples": 2308}, {"name": "test", "num_bytes": 222910686.75583863, "num_examples": 467}, {"name": "valid", "num_bytes": 185390598.79617834, "num_examples": 307}], "download_size": 593169359, "dataset_size": 1643720270.996981}}
2024-01-23T14:56:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for "multishort_id_rename_filtered" More Information needed
[ "# Dataset Card for \"multishort_id_rename_filtered\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"multishort_id_rename_filtered\"\n\nMore Information needed" ]
ae03e81c2f04a20e989d881377606756eae82d49
# Dataset Card for "multitiny_id_rename_filtered" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CJWeiss/multitiny_id_rename_filtered
[ "region:us" ]
2024-01-23T14:57:17+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 761355440.8153079, "num_examples": 1167}, {"name": "test", "num_bytes": 117156339.3625, "num_examples": 237}, {"name": "valid", "num_bytes": 113353675.37888199, "num_examples": 156}], "download_size": 346274224, "dataset_size": 991865455.5566897}}
2024-01-23T14:57:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "multitiny_id_rename_filtered" More Information needed
[ "# Dataset Card for \"multitiny_id_rename_filtered\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"multitiny_id_rename_filtered\"\n\nMore Information needed" ]
3eb6f2824d60ae8b00bf848a7e582b0f568d407c
# Dataset Card for Evaluation run of abhinand/telugu-llama-7b-instruct-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abhinand/telugu-llama-7b-instruct-v0.1](https://huggingface.co/abhinand/telugu-llama-7b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T18:07:17.451860](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1/blob/main/results_2024-01-23T18-07-17.451860.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24045369051307708, "acc_stderr": 0.029966214967594704, "acc_norm": 0.24113587081419893, "acc_norm_stderr": 0.030748707255235257, "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.49051835602774946, "mc2_stderr": 0.015311522184683459 }, "harness|arc:challenge|25": { "acc": 0.3438566552901024, "acc_stderr": 0.013880644570156211, "acc_norm": 0.371160409556314, "acc_norm_stderr": 0.014117971901142813 }, "harness|hellaswag|10": { "acc": 0.5331607249551882, "acc_stderr": 0.004978795454216732, "acc_norm": 0.6792471619199363, "acc_norm_stderr": 0.00465812015223082 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 
0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 
}, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, 
"acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.49051835602774946, "mc2_stderr": 0.015311522184683459 }, "harness|winogrande|5": { "acc": 0.6140489344909235, "acc_stderr": 0.013682036993397402 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1
[ "region:us" ]
2024-01-23T14:58:51+00:00
{"pretty_name": "Evaluation run of abhinand/telugu-llama-7b-instruct-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [abhinand/telugu-llama-7b-instruct-v0.1](https://huggingface.co/abhinand/telugu-llama-7b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T18:07:17.451860](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1/blob/main/results_2024-01-23T18-07-17.451860.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24045369051307708,\n \"acc_stderr\": 0.029966214967594704,\n \"acc_norm\": 0.24113587081419893,\n \"acc_norm_stderr\": 0.030748707255235257,\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.49051835602774946,\n \"mc2_stderr\": 0.015311522184683459\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3438566552901024,\n \"acc_stderr\": 0.013880644570156211,\n \"acc_norm\": 0.371160409556314,\n \"acc_norm_stderr\": 0.014117971901142813\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5331607249551882,\n \"acc_stderr\": 0.004978795454216732,\n \"acc_norm\": 0.6792471619199363,\n \"acc_norm_stderr\": 0.00465812015223082\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n 
\"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.49051835602774946,\n \"mc2_stderr\": 0.015311522184683459\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6140489344909235,\n \"acc_stderr\": 0.013682036993397402\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/abhinand/telugu-llama-7b-instruct-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|arc:challenge|25_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|gsm8k|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hellaswag|10_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-56-34.848721.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-56-34.848721.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-07-17.451860.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-07-17.451860.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-07-17.451860.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T18-07-17.451860.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-56-34.848721.parquet"]}, 
{"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["**/details_harness|winogrande|5_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": ["**/details_harness|winogrande|5_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T18-07-17.451860.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_23T14_56_34.848721", "path": ["results_2024-01-23T14-56-34.848721.parquet"]}, {"split": "2024_01_23T18_07_17.451860", "path": 
["results_2024-01-23T18-07-17.451860.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T18-07-17.451860.parquet"]}]}]}
2024-01-23T18:09:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abhinand/telugu-llama-7b-instruct-v0.1 Dataset automatically created during the evaluation run of model abhinand/telugu-llama-7b-instruct-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T18:07:17.451860 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
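For example, a minimal loading sketch (assuming the details repository follows the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern and that the `harness_winogrande_5` configuration exists for this run):

```python
from datasets import load_dataset

# Repository name assumed from the standard Open LLM Leaderboard details naming pattern
data = load_dataset(
    "open-llm-leaderboard/details_abhinand__telugu-llama-7b-instruct-v0.1",
    "harness_winogrande_5",
    split="train",
)
```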
[ "# Dataset Card for Evaluation run of abhinand/telugu-llama-7b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/telugu-llama-7b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T18:07:17.451860(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abhinand/telugu-llama-7b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/telugu-llama-7b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T18:07:17.451860(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
bd14f7dde43f6fe88a95dc17461fe77786683410
# Dataset Card for "multilong_id_rename_filtered" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CJWeiss/multilong_id_rename_filtered
[ "region:us" ]
2024-01-23T15:01:59+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1357698457.0, "num_examples": 3358}, {"name": "test", "num_bytes": 259339843.42521995, "num_examples": 678}, {"name": "valid", "num_bytes": 203294288.46799117, "num_examples": 446}], "download_size": 669359157, "dataset_size": 1820332588.8932111}}
2024-01-23T15:02:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "multilong_id_rename_filtered" More Information needed
[ "# Dataset Card for \"multilong_id_rename_filtered\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"multilong_id_rename_filtered\"\n\nMore Information needed" ]
7daa0cc5a90ec4a6ec58a1e39daf344fcbc50f21
# Dataset Card for "eurlexsum_id_rename_filtered" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CJWeiss/eurlexsum_id_rename_filtered
[ "region:us" ]
2024-01-23T15:03:00+00:00
{"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 92703471.14893617, "num_examples": 1127}, {"name": "test", "num_bytes": 28072606.0, "num_examples": 225}, {"name": "valid", "num_bytes": 19930581.0, "num_examples": 151}], "download_size": 55740650, "dataset_size": 140706658.14893615}}
2024-01-23T15:03:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "eurlexsum_id_rename_filtered" More Information needed
[ "# Dataset Card for \"eurlexsum_id_rename_filtered\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"eurlexsum_id_rename_filtered\"\n\nMore Information needed" ]