Dataset columns (type and observed length range):

sha               stringlengths     40 to 40
text              stringlengths     1 to 13.4M
id                stringlengths     2 to 117
tags              sequencelengths   1 to 7.91k
created_at        stringlengths     25 to 25
metadata          stringlengths     2 to 875k
last_modified     stringlengths     25 to 25
arxiv             sequencelengths   0 to 25
languages         sequencelengths   0 to 7.91k
tags_str          stringlengths     17 to 159k
text_str          stringlengths     1 to 447k
text_lists        sequencelengths   0 to 352
processed_texts   sequencelengths   1 to 353
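The columns above describe a dump of dataset cards, one row per repository. As an illustration only, here is a minimal sketch of how such a dump could be inspected once exported to a local Parquet file; the file name "cards_dump.parquet" is hypothetical and not part of the original data.

```python
# Hypothetical sketch: exploring a dataset-card dump with the columns listed above.
# The Parquet path is an assumption; substitute whatever export you actually have.
import pandas as pd

df = pd.read_parquet("cards_dump.parquet")

# The columns mirror the schema summary: sha, text, id, tags, created_at,
# metadata, last_modified, arxiv, languages, tags_str, text_str, ...
print(df.columns.tolist())

# Example query: dataset cards whose `languages` field includes Korean ("ko").
korean_cards = df[df["languages"].apply(lambda langs: "ko" in list(langs))]
print(len(korean_cards), "cards tagged as Korean")
```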
1defdf8227071fe0a417ca25642283505c4dbbec
gemini μ—μ„œ μƒμ„±ν•œ Persona 와 질문, 닡변을 담은 λ°μ΄ν„°μ…‹μž…λ‹ˆλ‹€.
yatsby/persona_chat
[ "task_categories:conversational", "language:ko", "region:us" ]
2024-02-05T08:54:05+00:00
{"language": ["ko"], "task_categories": ["conversational"], "dataset_info": {"features": [{"name": "persona", "struct": [{"name": "\ub098\uc774", "dtype": "string"}, {"name": "\ube44\ubc00", "dtype": "string"}, {"name": "\uc131\uaca9", "dtype": "string"}, {"name": "\uc678\ubaa8", "dtype": "string"}, {"name": "\uc774\ub984", "dtype": "string"}, {"name": "\uc774\uc0c1", "dtype": "string"}, {"name": "\uc9c1\uc5c5", "dtype": "string"}]}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 47910381, "num_examples": 21973}, {"name": "valid", "num_bytes": 2519850, "num_examples": 1160}], "download_size": 25171790, "dataset_size": 50430231}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}]}]}
2024-02-16T10:11:01+00:00
[]
[ "ko" ]
TAGS #task_categories-conversational #language-Korean #region-us
gemini μ—μ„œ μƒμ„±ν•œ Persona 와 질문, 닡변을 담은 λ°μ΄ν„°μ…‹μž…λ‹ˆλ‹€.
[]
[ "TAGS\n#task_categories-conversational #language-Korean #region-us \n" ]
f69495d9a725d7b2cace28f81bb68cb2bedd0780
# Dataset Card for Evaluation run of Stopwolf/Cerberus-7B-slerp

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [Stopwolf/Cerberus-7B-slerp](https://huggingface.co/Stopwolf/Cerberus-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-05T09:08:12.484477](https://huggingface.co/datasets/open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp/blob/main/results_2024-02-05T09-08-12.484477.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.629678937070562, "acc_stderr": 0.03233409511653798, "acc_norm": 0.6376621349483272, "acc_norm_stderr": 0.0330388306090536, "mc1": 0.44920440636474906, "mc1_stderr": 0.017412941986115302, "mc2": 0.6135075599156891, "mc2_stderr": 0.01558423055084616 }, "harness|arc:challenge|25": { "acc": 0.6723549488054608, "acc_stderr": 0.01371584794071934, "acc_norm": 0.6953924914675768, "acc_norm_stderr": 0.013449522109932487 }, "harness|hellaswag|10": { "acc": 0.6920932085241984, "acc_stderr": 0.004606843344517468, "acc_norm": 0.8733320055765784, "acc_norm_stderr": 0.0033192094001351165 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.038035102483515854, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.038035102483515854 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5276595744680851, "acc_stderr": 0.03263597118409769, "acc_norm": 0.5276595744680851, "acc_norm_stderr": 0.03263597118409769 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.02366421667164251, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.02366421667164251 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945627, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945627 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.02293514405391943, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.02293514405391943 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6230769230769231, "acc_stderr": 0.024570975364225995, "acc_norm": 0.6230769230769231, "acc_norm_stderr": 0.024570975364225995 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6176470588235294, "acc_stderr": 0.031566630992154156, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.031566630992154156 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8201834862385321, "acc_stderr": 0.016465345467391534, "acc_norm": 0.8201834862385321, "acc_norm_stderr": 0.016465345467391534 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.02675640153807897, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.02675640153807897 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229962, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229962 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.04742762361243011, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.04742762361243011 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128136, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128136 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8160919540229885, "acc_stderr": 0.013853724170922531, "acc_norm": 0.8160919540229885, "acc_norm_stderr": 0.013853724170922531 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.708092485549133, "acc_stderr": 0.024476994076247326, "acc_norm": 0.708092485549133, "acc_norm_stderr": 0.024476994076247326 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4212290502793296, "acc_stderr": 0.016513676031179595, "acc_norm": 0.4212290502793296, "acc_norm_stderr": 0.016513676031179595 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137894, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137894 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694912, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694912 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7129629629629629, "acc_stderr": 0.025171041915309684, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.025171041915309684 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4589308996088657, "acc_stderr": 0.012727084826799804, "acc_norm": 0.4589308996088657, "acc_norm_stderr": 0.012727084826799804 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6405228758169934, "acc_stderr": 0.01941253924203216, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.01941253924203216 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.44920440636474906, "mc1_stderr": 0.017412941986115302, "mc2": 0.6135075599156891, "mc2_stderr": 0.01558423055084616 }, "harness|winogrande|5": { "acc": 0.8129439621152328, "acc_stderr": 0.010959716435242912 }, "harness|gsm8k|5": { "acc": 0.17968157695223655, "acc_stderr": 0.010575119964242236 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
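The card above shows how to load one task's details; the aggregated scores live in the separate "results" configuration it mentions. The snippet below is a minimal sketch of pulling them, assuming the "results" configuration exposes the same "latest" split naming as the per-task configurations (its split list is truncated in the metadata that follows).

```python
# Sketch: load the aggregated results of the run. The "latest" split name is
# assumed to follow the same convention as the per-task configurations.
from datasets import load_dataset

results = load_dataset("open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp",
                       "results",
                       split="latest")
print(results[0])  # aggregated metrics such as acc, acc_norm, mc1, mc2 per task
```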
open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp
[ "region:us" ]
2024-02-05T09:10:34+00:00
{"pretty_name": "Evaluation run of Stopwolf/Cerberus-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [Stopwolf/Cerberus-7B-slerp](https://huggingface.co/Stopwolf/Cerberus-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T09:08:12.484477](https://huggingface.co/datasets/open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp/blob/main/results_2024-02-05T09-08-12.484477.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.629678937070562,\n \"acc_stderr\": 0.03233409511653798,\n \"acc_norm\": 0.6376621349483272,\n \"acc_norm_stderr\": 0.0330388306090536,\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.017412941986115302,\n \"mc2\": 0.6135075599156891,\n \"mc2_stderr\": 0.01558423055084616\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6723549488054608,\n \"acc_stderr\": 0.01371584794071934,\n \"acc_norm\": 0.6953924914675768,\n \"acc_norm_stderr\": 0.013449522109932487\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6920932085241984,\n \"acc_stderr\": 0.004606843344517468,\n \"acc_norm\": 0.8733320055765784,\n \"acc_norm_stderr\": 0.0033192094001351165\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.038035102483515854,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.038035102483515854\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 
0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5276595744680851,\n \"acc_stderr\": 0.03263597118409769,\n \"acc_norm\": 0.5276595744680851,\n \"acc_norm_stderr\": 0.03263597118409769\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.02366421667164251,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.02366421667164251\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945627,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945627\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.02293514405391943,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.02293514405391943\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6230769230769231,\n \"acc_stderr\": 0.024570975364225995,\n \"acc_norm\": 0.6230769230769231,\n \"acc_norm_stderr\": 0.024570975364225995\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.031566630992154156,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.031566630992154156\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8201834862385321,\n \"acc_stderr\": 0.016465345467391534,\n \"acc_norm\": 0.8201834862385321,\n \"acc_norm_stderr\": 0.016465345467391534\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.02675640153807897,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.02675640153807897\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229962,\n \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229962\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.04742762361243011,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.04742762361243011\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128136,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128136\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8160919540229885,\n \"acc_stderr\": 0.013853724170922531,\n 
\"acc_norm\": 0.8160919540229885,\n \"acc_norm_stderr\": 0.013853724170922531\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.708092485549133,\n \"acc_stderr\": 0.024476994076247326,\n \"acc_norm\": 0.708092485549133,\n \"acc_norm_stderr\": 0.024476994076247326\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4212290502793296,\n \"acc_stderr\": 0.016513676031179595,\n \"acc_norm\": 0.4212290502793296,\n \"acc_norm_stderr\": 0.016513676031179595\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137894,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137894\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.025171041915309684,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.025171041915309684\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4589308996088657,\n \"acc_stderr\": 0.012727084826799804,\n \"acc_norm\": 0.4589308996088657,\n \"acc_norm_stderr\": 0.012727084826799804\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.01941253924203216,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.01941253924203216\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.017412941986115302,\n \"mc2\": 0.6135075599156891,\n \"mc2_stderr\": 0.01558423055084616\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8129439621152328,\n \"acc_stderr\": 0.010959716435242912\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.17968157695223655,\n \"acc_stderr\": 0.010575119964242236\n }\n}\n```", "repo_url": 
"https://huggingface.co/Stopwolf/Cerberus-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-08-12.484477.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-08-12.484477.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-08-12.484477.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-08-12.484477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-08-12.484477.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T09_08_12.484477", "path": ["**/details_harness|winogrande|5_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T09-08-12.484477.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_05T09_08_12.484477", "path": ["results_2024-02-05T09-08-12.484477.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T09-08-12.484477.parquet"]}]}]}
2024-02-05T09:10:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Stopwolf/Cerberus-7B-slerp Dataset automatically created during the evaluation run of model Stopwolf/Cerberus-7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T09:08:12.484477 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
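The loading snippet referenced above ("you can for instance do the following") is not reproduced in this flattened text, so here is a minimal sketch of what it looks like in practice. It assumes the `datasets` library is installed and uses the `results` config and `latest` split that are declared in this record's metadata; it is an illustration, not part of the original card.

```python
from datasets import load_dataset

# Aggregated metrics for this evaluation run live in the "results" config;
# the "latest" split always resolves to the most recent run timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_Stopwolf__Cerberus-7B-slerp",
    "results",
    split="latest",
)

# A single row of aggregated scores for this run.
print(results[0])
```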
[ "# Dataset Card for Evaluation run of Stopwolf/Cerberus-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model Stopwolf/Cerberus-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:08:12.484477(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Stopwolf/Cerberus-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model Stopwolf/Cerberus-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:08:12.484477(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1eabbf7ac6fa3442ebf44e82151aeeb52f89c647
# Dataset Card for Evaluation run of Eric111/Mayo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Eric111/Mayo](https://huggingface.co/Eric111/Mayo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Eric111__Mayo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T09:14:00.989301](https://huggingface.co/datasets/open-llm-leaderboard/details_Eric111__Mayo/blob/main/results_2024-02-05T09-14-00.989301.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6601110051163788, "acc_stderr": 0.03167821761716325, "acc_norm": 0.660057544464853, "acc_norm_stderr": 0.03233445364773851, "mc1": 0.4320685434516524, "mc1_stderr": 0.017341202394988257, "mc2": 0.6093078415810402, "mc2_stderr": 0.015205905229495022 }, "harness|arc:challenge|25": { "acc": 0.6621160409556314, "acc_stderr": 0.013822047922283514, "acc_norm": 0.7013651877133106, "acc_norm_stderr": 0.013374078615068742 }, "harness|hellaswag|10": { "acc": 0.6697868950408286, "acc_stderr": 0.004693285694663837, "acc_norm": 0.8626767576180043, "acc_norm_stderr": 0.0034348485253881864 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.03823428969926605, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.03823428969926605 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7916666666666666, "acc_stderr": 0.03396116205845333, "acc_norm": 0.7916666666666666, "acc_norm_stderr": 0.03396116205845333 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, 
"acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.02544636563440678, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.02544636563440678 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083522, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083522 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.031922715695483016, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.031922715695483016 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.02860620428922987, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033477, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033477 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6820512820512821, "acc_stderr": 0.023610884308927865, "acc_norm": 0.6820512820512821, "acc_norm_stderr": 0.023610884308927865 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.030283995525884396, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.030283995525884396 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.015014462497168585, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.015014462497168585 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8774509803921569, "acc_stderr": 0.02301538973245826, "acc_norm": 0.8774509803921569, "acc_norm_stderr": 0.02301538973245826 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.025085961144579654, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.025085961144579654 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462472, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462472 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.842911877394636, "acc_stderr": 0.013012459322650714, "acc_norm": 0.842911877394636, "acc_norm_stderr": 0.013012459322650714 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7572254335260116, "acc_stderr": 0.023083658586984204, "acc_norm": 0.7572254335260116, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3675977653631285, "acc_stderr": 0.01612554382355295, "acc_norm": 0.3675977653631285, "acc_norm_stderr": 0.01612554382355295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826528, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826528 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7363344051446945, "acc_stderr": 0.02502553850053234, "acc_norm": 0.7363344051446945, "acc_norm_stderr": 0.02502553850053234 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7561728395061729, "acc_stderr": 0.023891879541959603, "acc_norm": 0.7561728395061729, "acc_norm_stderr": 0.023891879541959603 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5212765957446809, "acc_stderr": 0.029800481645628693, "acc_norm": 0.5212765957446809, 
"acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.48826597131681876, "acc_stderr": 0.012766719019686724, "acc_norm": 0.48826597131681876, "acc_norm_stderr": 0.012766719019686724 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7132352941176471, "acc_stderr": 0.027472274473233818, "acc_norm": 0.7132352941176471, "acc_norm_stderr": 0.027472274473233818 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.02519692987482706, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.02519692987482706 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4320685434516524, "mc1_stderr": 0.017341202394988257, "mc2": 0.6093078415810402, "mc2_stderr": 0.015205905229495022 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.01075935201485593 }, "harness|gsm8k|5": { "acc": 0.7338893100833965, "acc_stderr": 0.012172750939040316 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
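The snippet in the card above loads a single task config with `split="train"`. As a further usage sketch (assuming the `datasets` and `pandas` packages are available), the per-sample details for any config listed in this record's metadata can be pulled the same way and inspected as a dataframe; the config name `harness_gsm8k_5` and the `latest` split are taken from that metadata, not invented here.

```python
from datasets import load_dataset

# Per-sample details for one evaluated task; the "latest" split resolves to
# the newest run (2024-02-05T09:14:00.989301 for this card).
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_Eric111__Mayo",
    "harness_gsm8k_5",
    split="latest",
)

# Convert to pandas to inspect the per-example records.
df = gsm8k_details.to_pandas()
print(df.columns.tolist())
print(len(df), "evaluated examples")
```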
open-llm-leaderboard/details_Eric111__Mayo
[ "region:us" ]
2024-02-05T09:16:18+00:00
{"pretty_name": "Evaluation run of Eric111/Mayo", "dataset_summary": "Dataset automatically created during the evaluation run of model [Eric111/Mayo](https://huggingface.co/Eric111/Mayo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Eric111__Mayo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T09:14:00.989301](https://huggingface.co/datasets/open-llm-leaderboard/details_Eric111__Mayo/blob/main/results_2024-02-05T09-14-00.989301.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6601110051163788,\n \"acc_stderr\": 0.03167821761716325,\n \"acc_norm\": 0.660057544464853,\n \"acc_norm_stderr\": 0.03233445364773851,\n \"mc1\": 0.4320685434516524,\n \"mc1_stderr\": 0.017341202394988257,\n \"mc2\": 0.6093078415810402,\n \"mc2_stderr\": 0.015205905229495022\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6621160409556314,\n \"acc_stderr\": 0.013822047922283514,\n \"acc_norm\": 0.7013651877133106,\n \"acc_norm_stderr\": 0.013374078615068742\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6697868950408286,\n \"acc_stderr\": 0.004693285694663837,\n \"acc_norm\": 0.8626767576180043,\n \"acc_norm_stderr\": 0.0034348485253881864\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926605,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926605\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7916666666666666,\n \"acc_stderr\": 0.03396116205845333,\n \"acc_norm\": 0.7916666666666666,\n \"acc_norm_stderr\": 0.03396116205845333\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 
0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.02544636563440678,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.02544636563440678\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083522,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083522\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483016,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483016\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033477,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033477\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6820512820512821,\n \"acc_stderr\": 0.023610884308927865,\n \"acc_norm\": 
0.6820512820512821,\n \"acc_norm_stderr\": 0.023610884308927865\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131147,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131147\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.015014462497168585,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.015014462497168585\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8774509803921569,\n \"acc_stderr\": 0.02301538973245826,\n \"acc_norm\": 0.8774509803921569,\n \"acc_norm_stderr\": 0.02301538973245826\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579654,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579654\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462472,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462472\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.842911877394636,\n \"acc_stderr\": 0.013012459322650714,\n \"acc_norm\": 0.842911877394636,\n \"acc_norm_stderr\": 0.013012459322650714\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7572254335260116,\n \"acc_stderr\": 0.023083658586984204,\n \"acc_norm\": 0.7572254335260116,\n \"acc_norm_stderr\": 0.023083658586984204\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3675977653631285,\n \"acc_stderr\": 0.01612554382355295,\n \"acc_norm\": 0.3675977653631285,\n \"acc_norm_stderr\": 0.01612554382355295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826528,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826528\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7363344051446945,\n \"acc_stderr\": 0.02502553850053234,\n \"acc_norm\": 0.7363344051446945,\n \"acc_norm_stderr\": 0.02502553850053234\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7561728395061729,\n \"acc_stderr\": 0.023891879541959603,\n \"acc_norm\": 0.7561728395061729,\n \"acc_norm_stderr\": 0.023891879541959603\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5212765957446809,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.5212765957446809,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.48826597131681876,\n \"acc_stderr\": 0.012766719019686724,\n \"acc_norm\": 0.48826597131681876,\n \"acc_norm_stderr\": 0.012766719019686724\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7132352941176471,\n \"acc_stderr\": 0.027472274473233818,\n \"acc_norm\": 0.7132352941176471,\n \"acc_norm_stderr\": 0.027472274473233818\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.02519692987482706,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.02519692987482706\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4320685434516524,\n \"mc1_stderr\": 0.017341202394988257,\n \"mc2\": 0.6093078415810402,\n \"mc2_stderr\": 0.015205905229495022\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.01075935201485593\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7338893100833965,\n \"acc_stderr\": 0.012172750939040316\n }\n}\n```", "repo_url": "https://huggingface.co/Eric111/Mayo", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-14-00.989301.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-14-00.989301.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-14-00.989301.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-14-00.989301.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-14-00.989301.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-14-00.989301.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["**/details_harness|winogrande|5_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T09-14-00.989301.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_05T09_14_00.989301", "path": ["results_2024-02-05T09-14-00.989301.parquet"]}, {"split": "latest", "path": 
["results_2024-02-05T09-14-00.989301.parquet"]}]}]}
2024-02-05T09:16:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Eric111/Mayo Dataset automatically created during the evaluation run of model Eric111/Mayo on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T09:14:00.989301 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Eric111/Mayo\n\n\n\nDataset automatically created during the evaluation run of model Eric111/Mayo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:14:00.989301(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Eric111/Mayo\n\n\n\nDataset automatically created during the evaluation run of model Eric111/Mayo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:14:00.989301(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
720c6fe8efdb4e749ec92b45e3e834a71b09606b
# Dataset Card for "find_marker_both_sent_train_400_eval_40_in_context" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/find_marker_both_sent_train_400_eval_40_in_context
[ "region:us" ]
2024-02-05T09:23:12+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "context", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3738032, "num_examples": 1994}, {"name": "validation", "num_bytes": 383715, "num_examples": 200}], "download_size": 833365, "dataset_size": 4121747}}
2024-02-05T09:23:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "find_marker_both_sent_train_400_eval_40_in_context" More Information needed
[ "# Dataset Card for \"find_marker_both_sent_train_400_eval_40_in_context\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"find_marker_both_sent_train_400_eval_40_in_context\"\n\nMore Information needed" ]
4124d35fb345f185be5e8f19a7557e7cc441f34b
# Dataset Card for Evaluation run of paulml/NeuralOmniWestBeaglake-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [paulml/NeuralOmniWestBeaglake-7B](https://huggingface.co/paulml/NeuralOmniWestBeaglake-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T09:58:00.255850](https://huggingface.co/datasets/open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B/blob/main/results_2024-02-05T09-58-00.255850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6452309636710489, "acc_stderr": 0.03228854730174679, "acc_norm": 0.6455942939381135, "acc_norm_stderr": 0.03295573791186191, "mc1": 0.6070991432068543, "mc1_stderr": 0.017097248285233065, "mc2": 0.7509762464971609, "mc2_stderr": 0.014412765533767163 }, "harness|arc:challenge|25": { "acc": 0.7167235494880546, "acc_stderr": 0.013167478735134575, "acc_norm": 0.7372013651877133, "acc_norm_stderr": 0.012862523175351335 }, "harness|hellaswag|10": { "acc": 0.7450707030472018, "acc_stderr": 0.004349307702735165, "acc_norm": 0.8969328818960366, "acc_norm_stderr": 0.0030342513629399073 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.038035102483515854, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.038035102483515854 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 
}, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5404255319148936, "acc_stderr": 0.032579014820998356, "acc_norm": 0.5404255319148936, "acc_norm_stderr": 0.032579014820998356 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.025305906241590632, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.025305906241590632 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.023904914311782655, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.023904914311782655 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8181818181818182, "acc_stderr": 0.027479603010538797, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.027479603010538797 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768763, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768763 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6435897435897436, "acc_stderr": 0.024283140529467305, "acc_norm": 0.6435897435897436, "acc_norm_stderr": 0.024283140529467305 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6386554621848739, "acc_stderr": 0.03120469122515002, "acc_norm": 0.6386554621848739, "acc_norm_stderr": 0.03120469122515002 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3841059602649007, "acc_stderr": 
0.03971301814719197, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719197 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092427, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092427 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 0.03402801581358966, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621133, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621133 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.03446513350752599, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.03446513350752599 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516301, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516301 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822584, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179326, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179326 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8173690932311622, "acc_stderr": 0.013816335389973136, "acc_norm": 0.8173690932311622, "acc_norm_stderr": 0.013816335389973136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.024182427496577605, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.024182427496577605 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4581005586592179, "acc_stderr": 0.016663683295020524, "acc_norm": 0.4581005586592179, "acc_norm_stderr": 0.016663683295020524 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.025917806117147158, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.025917806117147158 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.02608270069539966, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.02608270069539966 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.024836057868294677, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.024836057868294677 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, "acc_stderr": 0.02979071924382972, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.02979071924382972 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4726205997392438, "acc_stderr": 0.012751075788015055, "acc_norm": 0.4726205997392438, "acc_norm_stderr": 0.012751075788015055 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389845, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389845 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6568627450980392, "acc_stderr": 0.01920660684882536, "acc_norm": 0.6568627450980392, "acc_norm_stderr": 0.01920660684882536 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616914, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774711, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774711 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.03844453181770917, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.6070991432068543, "mc1_stderr": 0.017097248285233065, "mc2": 0.7509762464971609, "mc2_stderr": 0.014412765533767163 }, "harness|winogrande|5": { "acc": 0.8492501973164956, "acc_stderr": 0.010056094631479674 }, "harness|gsm8k|5": { "acc": 0.5921152388172858, "acc_stderr": 0.013536742075643086 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
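The card above also mentions an aggregated "results" configuration alongside the per-task ones; a minimal sketch of reading it follows. The repo id and config name come from the card itself, while the `latest` split name is an assumption based on the timestamp/"latest" split pattern of the other records in this dump, and the parquet column layout is not documented here, so the code just inspects it.

```python
# Minimal sketch: read the aggregated "results" configuration for the
# paulml/NeuralOmniWestBeaglake-7B evaluation run described above.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B",
    "results",
    split="latest",  # assumed split name, following the other records' timestamp/"latest" pattern
)
print(results.column_names)  # column layout is undocumented in the card, so inspect it first
print(results[0])
```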
open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B
[ "region:us" ]
2024-02-05T10:00:20+00:00
{"pretty_name": "Evaluation run of paulml/NeuralOmniWestBeaglake-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [paulml/NeuralOmniWestBeaglake-7B](https://huggingface.co/paulml/NeuralOmniWestBeaglake-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T09:58:00.255850](https://huggingface.co/datasets/open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B/blob/main/results_2024-02-05T09-58-00.255850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6452309636710489,\n \"acc_stderr\": 0.03228854730174679,\n \"acc_norm\": 0.6455942939381135,\n \"acc_norm_stderr\": 0.03295573791186191,\n \"mc1\": 0.6070991432068543,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.7509762464971609,\n \"mc2_stderr\": 0.014412765533767163\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7167235494880546,\n \"acc_stderr\": 0.013167478735134575,\n \"acc_norm\": 0.7372013651877133,\n \"acc_norm_stderr\": 0.012862523175351335\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7450707030472018,\n \"acc_stderr\": 0.004349307702735165,\n \"acc_norm\": 0.8969328818960366,\n \"acc_norm_stderr\": 0.0030342513629399073\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.038035102483515854,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.038035102483515854\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.032579014820998356,\n \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.032579014820998356\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.025305906241590632,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.025305906241590632\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782655,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782655\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.027479603010538797,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.027479603010538797\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768763,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768763\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6435897435897436,\n \"acc_stderr\": 0.024283140529467305,\n \"acc_norm\": 0.6435897435897436,\n \"acc_norm_stderr\": 0.024283140529467305\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683515,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683515\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6386554621848739,\n \"acc_stderr\": 0.03120469122515002,\n \"acc_norm\": 0.6386554621848739,\n \"acc_norm_stderr\": 0.03120469122515002\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092427,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092427\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621133,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621133\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752599,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752599\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516301,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516301\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179326,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179326\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8173690932311622,\n \"acc_stderr\": 0.013816335389973136,\n \"acc_norm\": 0.8173690932311622,\n \"acc_norm_stderr\": 0.013816335389973136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.024182427496577605,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.024182427496577605\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4581005586592179,\n \"acc_stderr\": 0.016663683295020524,\n \"acc_norm\": 0.4581005586592179,\n \"acc_norm_stderr\": 0.016663683295020524\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.025917806117147158,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.025917806117147158\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.02979071924382972,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.02979071924382972\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4726205997392438,\n \"acc_stderr\": 0.012751075788015055,\n \"acc_norm\": 0.4726205997392438,\n \"acc_norm_stderr\": 0.012751075788015055\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6568627450980392,\n \"acc_stderr\": 0.01920660684882536,\n \"acc_norm\": 0.6568627450980392,\n \"acc_norm_stderr\": 0.01920660684882536\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774711,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774711\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6070991432068543,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.7509762464971609,\n \"mc2_stderr\": 0.014412765533767163\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8492501973164956,\n \"acc_stderr\": 0.010056094631479674\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5921152388172858,\n \"acc_stderr\": 0.013536742075643086\n 
}\n}\n```", "repo_url": "https://huggingface.co/paulml/NeuralOmniWestBeaglake-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-58-00.255850.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-58-00.255850.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-58-00.255850.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T09-58-00.255850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-58-00.255850.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T09_58_00.255850", "path": ["**/details_harness|winogrande|5_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T09-58-00.255850.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_05T09_58_00.255850", "path": ["results_2024-02-05T09-58-00.255850.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T09-58-00.255850.parquet"]}]}]}
2024-02-05T10:00:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of paulml/NeuralOmniWestBeaglake-7B Dataset automatically created during the evaluation run of model paulml/NeuralOmniWestBeaglake-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T09:58:00.255850 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
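The loading snippet referenced above ("you can for instance do the following") is not reproduced in this flattened text field, so here is a minimal sketch using the `datasets` library. The details-repository id is inferred from the leaderboard's usual `details_<org>__<model>` naming convention rather than stated explicitly in this record, and `harness_winogrande_5` is simply one of the config names listed in this record's metadata.

```python
from datasets import load_dataset

# Repo id assumed from the Open LLM Leaderboard's usual naming convention for details datasets.
data = load_dataset(
    "open-llm-leaderboard/details_paulml__NeuralOmniWestBeaglake-7B",
    "harness_winogrande_5",  # any config name listed in this record's metadata works here
    split="train",           # the "train" split always points at the latest results
)
print(data[0])
```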
[ "# Dataset Card for Evaluation run of paulml/NeuralOmniWestBeaglake-7B\n\n\n\nDataset automatically created during the evaluation run of model paulml/NeuralOmniWestBeaglake-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:58:00.255850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of paulml/NeuralOmniWestBeaglake-7B\n\n\n\nDataset automatically created during the evaluation run of model paulml/NeuralOmniWestBeaglake-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T09:58:00.255850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
83716ad3a1658a12e9f8b0b8295c69dc9fb0ad50
Datasets used in [RankGPT](https://github.com/sunnweiwei/RankGPT).
liuqi6777/RankGPT
[ "region:us" ]
2024-02-05T10:22:15+00:00
{}
2024-02-05T13:23:21+00:00
[]
[]
TAGS #region-us
Datasets used in RankGPT.
[]
[ "TAGS\n#region-us \n" ]
a5430c807f967271a65e9628bbda646bff37ef41
The present dataset is compiled from the following datasets: # CulturaX - License - ODC-By, CC0, [Paper](https://arxiv.org/pdf/2309.09400.pdf) - Source - https://huggingface.co/datasets/uonlp/CulturaX/viewer/or? - 49M tokens, 2.9M sentences # IndicQA - License - cc-by-4.0 - Source - https://huggingface.co/datasets/ai4bharat/IndicQA/viewer/indicqa.or - 0.23M tokens, 15K sentences # Odiaencorp - License - CC BY-NC-SA 4.0 - Source - https://lindat.mff.cuni.cz/repository/xmlui/handle/11234/1-3211 - 1.1M tokens, 85K sentences # Oscar - License - cc0-1.0, [Paper](https://arxiv.org/pdf/2201.06642.pdf) - Source - https://huggingface.co/datasets/oscar-corpus/OSCAR-2201 - 25.7M tokens, 1.2M sentences # Paraphrasing - License - cc-by-nc-4.0, [Paper](https://arxiv.org/abs/2203.05437) - Source - https://huggingface.co/datasets/ai4bharat/IndicParaphrase/viewer/or - 2.3M tokens, 0.105M sentences # PMO - License - cc-by-nc-4.0 - Source - [PMO website](https://www.pmindia.gov.in/ory/) - 2.2M tokens, 0.131M sentences # Samanantar - License - cc-by-nc-4.0, [Paper](https://arxiv.org/abs/2104.05596) - Source - https://huggingface.co/datasets/ai4bharat/samanantar/viewer/or - 10.25M tokens, 0.909M sentences # Sentiment Analysis - License - Unspecified - Source - https://huggingface.co/datasets/ai4bharat/IndicSentiment/viewer/translation-or - 34K tokens, 1000 sentences # Wiki - License - cc-by-sa-3.0, gfdl - Source - https://huggingface.co/datasets/wikimedia/wikipedia/viewer/20231101.or - 4.1M tokens, 0.253M sentences # XP3 - License - apache-2.0 - Source - https://huggingface.co/datasets/bigscience/xP3all - 4.9M tokens, 0.26M sentences
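As a usage sketch (not part of the original card): each source listed above is exposed as its own configuration of this repository (the config names, e.g. `wiki`, `oscar`, `pmo`, appear in this record's metadata), and the dataset is gated, so loading is assumed to require an authenticated Hugging Face account.

```python
from datasets import load_dataset

# Config names ("wiki", "oscar", "pmo", ...) come from this record's metadata.
# The dataset is gated, so accepting the terms and logging in (e.g. `huggingface-cli login`)
# is assumed before this call will succeed.
wiki_or = load_dataset("OdiaGenAIdata/pre_train_odia_data", "wiki", split="train")
print(wiki_or[0])
```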
OdiaGenAIdata/pre_train_odia_data
[ "size_categories:1M<n<10M", "arxiv:2309.09400", "arxiv:2201.06642", "arxiv:2203.05437", "arxiv:2104.05596", "region:us" ]
2024-02-05T10:57:51+00:00
{"size_categories": ["1M<n<10M"], "extra_gated_prompt": "You agree to use this dataset solely for non-commercial and research purposes.", "extra_gated_fields": {"Country": "country", "Specific date": "date_picker", "I want to use this dataset for": {"type": "select", "options": ["Research", "Education", {"label": "Other", "value": "other"}]}, "I agree to use this dataset for non-commercial use ONLY": "checkbox"}, "configs": [{"config_name": "wiki", "data_files": [{"split": "train", "path": "wiki_odia_253Ks_4p11Mw.json"}]}, {"config_name": "oscar", "data_files": [{"split": "train", "path": "oscar_odia_1p16Ms_25Mw.json"}]}, {"config_name": "paraphrasing", "data_files": [{"split": "train", "path": "paraphrasing_odia_105Ks_2p3Mw.json"}]}, {"config_name": "indicQA", "data_files": [{"split": "train", "path": "indicQA_odia_12Ks_184Kw.json"}]}, {"config_name": "sentiment_analysis", "data_files": [{"split": "train", "path": "sentiment_analysis_odia_1Ks_34Kw.json"}]}, {"config_name": "odiaencorp", "data_files": [{"split": "train", "path": "odiaencorp_85Ks_1p1Mw.json"}]}, {"config_name": "xp3", "data_files": [{"split": "train", "path": "xp3_261Ks_4p9Mw.json"}]}, {"config_name": "samanantar", "data_files": [{"split": "train", "path": "samanantar_odia_990Ks_10Mw.json"}]}, {"config_name": "cultureax", "data_files": [{"split": "train", "path": "cultureax_odia_2p9Ks_49Mw.json"}]}, {"config_name": "pmo", "data_files": [{"split": "train", "path": "pmo_data.json"}]}, {"config_name": "varta", "data_files": [{"split": "validation", "path": "val_or_shard_01.json"}, {"split": "train", "path": "train_or_shard_01.json"}]}]}
2024-02-17T13:58:07+00:00
[ "2309.09400", "2201.06642", "2203.05437", "2104.05596" ]
[]
TAGS #size_categories-1M<n<10M #arxiv-2309.09400 #arxiv-2201.06642 #arxiv-2203.05437 #arxiv-2104.05596 #region-us
The present dataset is compiled by using the following datasets: # CultureaX - Licesnse - ODC-By, CC0, Paper - Source - URL - 49M tokens, 2.9M sentences # IndicQA - License - cc-by-4.0 - Source - URL - 0.23M tokens, 15K sentences # Odiaencorp - License - CC BY-NC-SA 4.0 - Source - URL - 1.1M tokens, 85K sentences # Oscar - License - cc0-1.0, Paper - Source - URL - 25.7M tokens, 1.2M sentences # Paraphrasing - License - cc-by-nc-4.0, Paper - Source - URL - 2.3M tokens, 0.105M sentences # PMO - License - cc-by-nc-4.0 - Source - PMO website - 2.2M tokens, 0.131M sentences # Samanantar - License - cc-by-nc-4.0, Paper - Source - URL - 10.25M Tokens, 0.909M Sentences # Sentiment Analysis - License - Unspecified - Source - URL - 34K Tokens, 1000 Sentences # Wiki - License - cc-by-sa-3.0, gfdl - Source - URL - 4.1M tokens, 0.253M sentences # XP3 - License - apache-2.0 - Source - URL - 4.9M tokens, 0.26M sentences
[ "# CultureaX\n - Licesnse - ODC-By, CC0, Paper\n - Source - URL\n - 49M tokens, 2.9M sentences", "# IndicQA\n - License - cc-by-4.0\n - Source - URL\n - 0.23M tokens, 15K sentences", "# Odiaencorp \n - License - CC BY-NC-SA 4.0\n - Source - URL\n - 1.1M tokens, 85K sentences", "# Oscar\n - License - cc0-1.0, Paper\n - Source - URL\n - 25.7M tokens, 1.2M sentences", "# Paraphrasing\n - License - cc-by-nc-4.0, Paper\n - Source - URL\n - 2.3M tokens, 0.105M sentences", "# PMO\n - License - cc-by-nc-4.0\n - Source - PMO website\n - 2.2M tokens, 0.131M sentences", "# Samanantar\n - License - cc-by-nc-4.0, Paper\n - Source - URL\n - 10.25M Tokens, 0.909M Sentences", "# Sentiment Analysis\n - License - Unspecified\n - Source - URL\n - 34K Tokens, 1000 Sentences", "# Wiki\n - License - cc-by-sa-3.0, gfdl\n - Source - URL\n - 4.1M tokens, 0.253M sentences", "# XP3\n - License - apache-2.0\n - Source - URL\n - 4.9M tokens, 0.26M sentences" ]
[ "TAGS\n#size_categories-1M<n<10M #arxiv-2309.09400 #arxiv-2201.06642 #arxiv-2203.05437 #arxiv-2104.05596 #region-us \n", "# CultureaX\n - Licesnse - ODC-By, CC0, Paper\n - Source - URL\n - 49M tokens, 2.9M sentences", "# IndicQA\n - License - cc-by-4.0\n - Source - URL\n - 0.23M tokens, 15K sentences", "# Odiaencorp \n - License - CC BY-NC-SA 4.0\n - Source - URL\n - 1.1M tokens, 85K sentences", "# Oscar\n - License - cc0-1.0, Paper\n - Source - URL\n - 25.7M tokens, 1.2M sentences", "# Paraphrasing\n - License - cc-by-nc-4.0, Paper\n - Source - URL\n - 2.3M tokens, 0.105M sentences", "# PMO\n - License - cc-by-nc-4.0\n - Source - PMO website\n - 2.2M tokens, 0.131M sentences", "# Samanantar\n - License - cc-by-nc-4.0, Paper\n - Source - URL\n - 10.25M Tokens, 0.909M Sentences", "# Sentiment Analysis\n - License - Unspecified\n - Source - URL\n - 34K Tokens, 1000 Sentences", "# Wiki\n - License - cc-by-sa-3.0, gfdl\n - Source - URL\n - 4.1M tokens, 0.253M sentences", "# XP3\n - License - apache-2.0\n - Source - URL\n - 4.9M tokens, 0.26M sentences" ]
49c2b5805f0df0dd7be9ba2162af60fda1d34ed8
# Dataset Card for "Wish-Summarization-Llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-Summarization-Llama
[ "region:us" ]
2024-02-05T11:05:50+00:00
{"dataset_info": {"features": [{"name": "idx", "dtype": "int64"}, {"name": "text", "dtype": "string"}, {"name": "highlights", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_0.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 37134142, "num_examples": 10000}], "download_size": 22985746, "dataset_size": 37134142}}
2024-02-05T11:06:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-Summarization-Llama" More Information needed
[ "# Dataset Card for \"Wish-Summarization-Llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-Summarization-Llama\"\n\nMore Information needed" ]
d656e60e783a53223c1f1612244150a5bb774354
# Dataset Card for "Wish-QA-NQ-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-NQ-Falcon
[ "region:us" ]
2024-02-05T11:06:23+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "int64"}, {"name": "id", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "document", "dtype": "string"}, {"name": "original_question", "dtype": "string"}, {"name": "long_answers", "dtype": "string"}, {"name": "short_answers", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1550698972, "num_examples": 39299}], "download_size": 835932164, "dataset_size": 1550698972}}
2024-02-05T11:12:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-NQ-Falcon" More Information needed
[ "# Dataset Card for \"Wish-QA-NQ-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-NQ-Falcon\"\n\nMore Information needed" ]
a35d7f9e03d4d4be5a0e3939f57d885f4a5fccc4
# Dataset Card for MasakhaPOS ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** [homepage](https://github.com/masakhane-io/masakhane-pos/) - **Repository:** [github](https://github.com/masakhane-io/masakhane-pos/) - **Paper:** [paper](https://aclanthology.org/2023.acl-long.609/) - **Point of Contact:** [Masakhane](https://www.masakhane.io/) or [email protected] ### Dataset Summary MasakhaPOS is the largest publicly available high-quality dataset for part-of-speech (POS) tagging in 20 African languages. The languages covered are listed in the Languages section below. The train/validation/test sets are available for all 20 languages. For more details see https://aclanthology.org/2023.acl-long.609/ ### Supported Tasks and Leaderboards - `Part-of-speech`: The performance in this task is measured with [accuracy](https://huggingface.co/spaces/evaluate-metric/accuracy) (higher is better). ### Languages There are 20 languages available: - Bambara (bam) - Ghomala (bbj) - Ewe (ewe) - Fon (fon) - Hausa (hau) - Igbo (ibo) - Kinyarwanda (kin) - Luganda (lug) - Dholuo (luo) - Mossi (mos) - Chichewa (nya) - Nigerian Pidgin (pcm) - chiShona (sna) - Kiswahili (swa) - Setswana (tsn) - Twi (twi) - Wolof (wol) - isiXhosa (xho) - YorΓΉbΓ‘ (yor) - isiZulu (zul) ## Dataset Structure ### Data Instances The examples look like this for YorΓΉbΓ‘: ``` from datasets import load_dataset data = load_dataset('masakhane/masakhapos', 'yor') # Please, specify the language code # A data point consists of sentences separated by an empty line, with tab-separated tokens and tags. {'id': '0', 'upos': [0, 10, 10, 16, 0, 14, 0, 16, 0], 'tokens': ['α»ŒΜ€gbẹ́ni', 'Nuhu', 'Adam', 'kúroΜ€', 'nípoΜ€', 'bí', 'αΊΉní', 'yọ', 'jiΜ€gá'] } ``` ### Data Fields - `id`: id of the sample - `tokens`: the tokens of the example text - `upos`: the POS tags of each token The POS tags correspond to this list: ``` "NOUN", "PUNCT", "ADP", "NUM", "SYM", "SCONJ", "ADJ", "PART", "DET", "CCONJ", "PROPN", "PRON", "X", "ADV", "INTJ", "VERB", "AUX"``` The definition of the tags can be found on the [UD website](https://universaldependencies.org/u/pos/). A short sketch showing how to map the integer tags back to these names is included at the end of this card. ### Data Splits For all languages, there are three splits. The original splits were named `train`, `dev` and `test` and they correspond to the `train`, `validation` and `test` splits. 
The splits have the following sizes: | Language | train | validation | test | |-----------------|------:|-----------:|------:| | Bambara | 775 | 154 | 619 | | Ghomala | 750 | 149 | 599 | | Ewe | 728 | 145 | 582 | | Fon | 810 | 161 | 646 | | Hausa | 753 | 150 | 601 | | Igbo | 803 | 160 | 642 | | Kinyarwanda | 757 | 151 | 604 | | Luganda | 733 | 146 | 586 | | Luo | 758 | 151 | 606 | | Mossi | 757 | 151 | 604 | | Chichewa | 728 | 145 | 582 | | Nigerian-Pidgin | 752 | 150 | 600 | | chiShona | 747 | 149 | 596 | | Kiswahili | 693 | 138 | 553 | | Setswana | 754 | 150 | 602 | | Akan/Twi | 785 | 157 | 628 | | Wolof | 782 | 156 | 625 | | isiXhosa | 752 | 150 | 601 | | Yoruba | 893 | 178 | 713 | | isiZulu | 753 | 150 | 601 | ## Dataset Creation ### Curation Rationale The dataset was created to provide new resources for 20 African languages that have been under-served in natural language processing. ### Source Data The source of the data is the news domain; details can be found here: https://aclanthology.org/2023.acl-long.609/ #### Initial Data Collection and Normalization The articles were word-tokenized; information on the exact pre-processing pipeline is unavailable. #### Who are the source language producers? The source language was produced by journalists and writers employed by the source news agencies and newspapers. ### Annotations #### Annotation process Details can be found here: https://aclanthology.org/2023.acl-long.609/ #### Who are the annotators? Annotators were recruited from [Masakhane](https://www.masakhane.io/). ### Personal and Sensitive Information The data is sourced from newspapers and only contains mentions of public figures or individuals. ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations Users should keep in mind that the dataset only contains news text, which might limit the applicability of the developed systems to other domains. ## Additional Information ### Dataset Curators ### Licensing Information The licensing status of the data is CC 4.0 Non-Commercial ### Citation Information The dataset can be cited as follows: ``` @inproceedings{dione-etal-2023-masakhapos, title = "{M}asakha{POS}: Part-of-Speech Tagging for Typologically Diverse {A}frican languages", author = "Dione, Cheikh M. Bamba and Adelani, David Ifeoluwa and Nabende, Peter and Alabi, Jesujoba and Sindane, Thapelo and Buzaaba, Happy and Muhammad, Shamsuddeen Hassan and Emezue, Chris Chinenye and Ogayo, Perez and Aremu, Anuoluwapo and Gitau, Catherine and Mbaye, Derguene and Mukiibi, Jonathan and Sibanda, Blessing and Dossou, Bonaventure F. P. 
and Bukula, Andiswa and Mabuya, Rooweither and Tapo, Allahsera Auguste and Munkoh-Buabeng, Edwin and Memdjokam Koagne, Victoire and Ouoba Kabore, Fatoumata and Taylor, Amelia and Kalipe, Godson and Macucwa, Tebogo and Marivate, Vukosi and Gwadabe, Tajuddeen and Elvis, Mboning Tchiaze and Onyenwe, Ikechukwu and Atindogbe, Gratien and Adelani, Tolulope and Akinade, Idris and Samuel, Olanrewaju and Nahimana, Marien and Musabeyezu, Th{\'e}og{\`e}ne and Niyomutabazi, Emile and Chimhenga, Ester and Gotosa, Kudzai and Mizha, Patrick and Agbolo, Apelete and Traore, Seydou and Uchechukwu, Chinedu and Yusuf, Aliyu and Abdullahi, Muhammad and Klakow, Dietrich", editor = "Rogers, Anna and Boyd-Graber, Jordan and Okazaki, Naoaki", booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2023", address = "Toronto, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/2023.acl-long.609", doi = "10.18653/v1/2023.acl-long.609", pages = "10883--10900", abstract = "In this paper, we present AfricaPOS, the largest part-of-speech (POS) dataset for 20 typologically diverse African languages. We discuss the challenges in annotating POS for these languages using the universal dependencies (UD) guidelines. We conducted extensive POS baseline experiments using both conditional random field and several multilingual pre-trained language models. We applied various cross-lingual transfer models trained with data available in the UD. Evaluating on the AfricaPOS dataset, we show that choosing the best transfer language(s) in both single-source and multi-source setups greatly improves the POS tagging performance of the target languages, in particular when combined with parameter-fine-tuning methods. Crucially, transferring knowledge from a language that matches the language family and morphosyntactic properties seems to be more effective for POS tagging in unseen languages.", } ``` ### Contributions Thanks to [@dadelani](https://github.com/dadelani) for adding this dataset.
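The sketch below (referenced from the Data Fields section above, and not part of the original card) shows one way to map the integer `upos` ids back to tag names. It assumes the `upos` column is stored as a sequence of class labels, as is typical for Hugging Face token-classification datasets; if that assumption does not hold, the explicit tag list from the card is used as a fallback.

```python
from datasets import load_dataset

data = load_dataset("masakhane/masakhapos", "yor")  # language code as in the card's example

# Tag order as listed in the card's Data Fields section (fallback mapping).
POS_TAGS = ["NOUN", "PUNCT", "ADP", "NUM", "SYM", "SCONJ", "ADJ", "PART", "DET",
            "CCONJ", "PROPN", "PRON", "X", "ADV", "INTJ", "VERB", "AUX"]

example = data["train"][0]
try:
    # If `upos` is a Sequence(ClassLabel) feature, the label names are stored on the feature itself.
    names = data["train"].features["upos"].feature.names
except AttributeError:
    names = POS_TAGS

for token, tag_id in zip(example["tokens"], example["upos"]):
    print(f"{token}\t{names[tag_id]}")
```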
masakhane/masakhapos
[ "task_categories:token-classification", "task_ids:named-entity-recognition", "annotations_creators:expert-generated", "language_creators:expert-generated", "multilinguality:multilingual", "size_categories:1K<n<10K", "source_datasets:original", "language:bm", "language:bbj", "language:ee", "language:fon", "language:ha", "language:ig", "language:rw", "language:lg", "language:luo", "language:mos", "language:ny", "language:pcm", "language:sn", "language:sw", "language:tn", "language:tw", "language:wo", "language:xh", "language:yo", "language:zu", "license:afl-3.0", "pos", "masakhapos", "masakhane", "region:us" ]
2024-02-05T11:09:14+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["expert-generated"], "language": ["bm", "bbj", "ee", "fon", "ha", "ig", "rw", "lg", "luo", "mos", "ny", "pcm", "sn", "sw", "tn", "tw", "wo", "xh", "yo", "zu"], "license": ["afl-3.0"], "multilinguality": ["multilingual"], "size_categories": ["1K<n<10K"], "source_datasets": ["original"], "task_categories": ["token-classification"], "task_ids": ["named-entity-recognition"], "pretty_name": "masakhapos", "tags": ["pos", "masakhapos", "masakhane"]}
2024-02-05T11:09:55+00:00
[]
[ "bm", "bbj", "ee", "fon", "ha", "ig", "rw", "lg", "luo", "mos", "ny", "pcm", "sn", "sw", "tn", "tw", "wo", "xh", "yo", "zu" ]
TAGS #task_categories-token-classification #task_ids-named-entity-recognition #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-multilingual #size_categories-1K<n<10K #source_datasets-original #language-Bambara #language-GhomΓ‘lΓ‘' #language-Ewe #language-Fon #language-Hausa #language-Igbo #language-Kinyarwanda #language-Ganda #language-Luo (Kenya and Tanzania) #language-Mossi #language-Nyanja #language-Nigerian Pidgin #language-Shona #language-Swahili (macrolanguage) #language-Tswana #language-Twi #language-Wolof #language-Xhosa #language-Yoruba #language-Zulu #license-afl-3.0 #pos #masakhapos #masakhane #region-us
Dataset Card for [Dataset Name] =============================== Table of Contents ----------------- * Table of Contents * Dataset Description + Dataset Summary + Supported Tasks and Leaderboards + Languages * Dataset Structure + Data Instances + Data Fields + Data Splits * Dataset Creation + Curation Rationale + Source Data + Annotations + Personal and Sensitive Information * Considerations for Using the Data + Social Impact of Dataset + Discussion of Biases + Other Known Limitations * Additional Information + Dataset Curators + Licensing Information + Citation Information + Contributions Dataset Description ------------------- * Homepage: homepage * Repository: github * Paper: paper * Point of Contact: Masakhane or didelani@URL ### Dataset Summary MasakhaPOS is the largest publicly available high-quality dataset for part-of-speech (POS) tagging in 20 African languages. The languages covered are: The train/validation/test sets are available for all the 20 languages. For more details see URL ### Supported Tasks and Leaderboards * 'Part-of-speech': The performance in this task is measured with accuracy (higher is better). ### Languages There are 20 languages available : * Bambara (bam) * Ghomala (bbj) * Ewe (ewe) * Fon (fon) * Hausa (hau) * Igbo (ibo) * Kinyarwanda (kin) * Luganda (lug) * Dholuo (luo) * Mossi (mos) * Chichewa (nya) * Nigerian Pidgin * chShona (sna) * Kiswahili (swΔ…) * Setswana (tsn) * Twi (twi) * Wolof (wol) * isiXhosa (xho) * YorΓΉbΓ‘ (yor) * isiZulu (zul) Dataset Structure ----------------- ### Data Instances The examples look like this for YorΓΉbΓ‘: ### Data Fields * 'id': id of the sample * 'tokens': the tokens of the example text * 'upos': the POS tags of each token The POS tags correspond to this list: The definition of the tags can be found on UD website ### Data Splits For all languages, there are three splits. The original splits were named 'train', 'dev' and 'test' and they correspond to the 'train', 'validation' and 'test' splits. The splits have the following sizes : Dataset Creation ---------------- ### Curation Rationale The dataset was introduced to introduce new resources to 20 languages that were under-served for natural language processing. ### Source Data The source of the data is from the news domain, details can be found here URL #### Initial Data Collection and Normalization The articles were word-tokenized, information on the exact pre-processing pipeline is unavailable. #### Who are the source language producers? The source language was produced by journalists and writers employed by the news agency and newspaper mentioned above. ### Annotations #### Annotation process Details can be found here URL #### Who are the annotators? Annotators were recruited from Masakhane ### Personal and Sensitive Information The data is sourced from newspaper source and only contains mentions of public figures or individuals Considerations for Using the Data --------------------------------- ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations Users should keep in mind that the dataset only contains news text, which might limit the applicability of the developed systems to other domains. Additional Information ---------------------- ### Dataset Curators ### Licensing Information The licensing status of the data is CC 4.0 Non-Commercial Provide the BibTex-formatted reference for the dataset. For example: ### Contributions Thanks to @dadelani for adding this dataset.
[ "### Dataset Summary\n\n\nMasakhaPOS is the largest publicly available high-quality dataset for part-of-speech (POS) tagging in 20 African languages. The languages covered are:\n\n\nThe train/validation/test sets are available for all the 20 languages.\n\n\nFor more details see URL", "### Supported Tasks and Leaderboards\n\n\n* 'Part-of-speech': The performance in this task is measured with accuracy (higher is better).", "### Languages\n\n\nThere are 20 languages available :\n\n\n* Bambara (bam)\n* Ghomala (bbj)\n* Ewe (ewe)\n* Fon (fon)\n* Hausa (hau)\n* Igbo (ibo)\n* Kinyarwanda (kin)\n* Luganda (lug)\n* Dholuo (luo)\n* Mossi (mos)\n* Chichewa (nya)\n* Nigerian Pidgin\n* chShona (sna)\n* Kiswahili (swΔ…)\n* Setswana (tsn)\n* Twi (twi)\n* Wolof (wol)\n* isiXhosa (xho)\n* YorΓΉbΓ‘ (yor)\n* isiZulu (zul)\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nThe examples look like this for YorΓΉbΓ‘:", "### Data Fields\n\n\n* 'id': id of the sample\n* 'tokens': the tokens of the example text\n* 'upos': the POS tags of each token\n\n\nThe POS tags correspond to this list:\n\n\nThe definition of the tags can be found on UD website", "### Data Splits\n\n\nFor all languages, there are three splits.\n\n\nThe original splits were named 'train', 'dev' and 'test' and they correspond to the 'train', 'validation' and 'test' splits.\n\n\nThe splits have the following sizes :\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset was introduced to introduce new resources to 20 languages that were under-served for natural language processing.", "### Source Data\n\n\nThe source of the data is from the news domain, details can be found here URL", "#### Initial Data Collection and Normalization\n\n\nThe articles were word-tokenized, information on the exact pre-processing pipeline is unavailable.", "#### Who are the source language producers?\n\n\nThe source language was produced by journalists and writers employed by the news agency and newspaper mentioned above.", "### Annotations", "#### Annotation process\n\n\nDetails can be found here URL", "#### Who are the annotators?\n\n\nAnnotators were recruited from Masakhane", "### Personal and Sensitive Information\n\n\nThe data is sourced from newspaper source and only contains mentions of public figures or individuals\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nUsers should keep in mind that the dataset only contains news text, which might limit the applicability of the developed systems to other domains.\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information\n\n\nThe licensing status of the data is CC 4.0 Non-Commercial\n\n\nProvide the BibTex-formatted reference for the dataset. For example:", "### Contributions\n\n\nThanks to @dadelani for adding this dataset." ]
[ "TAGS\n#task_categories-token-classification #task_ids-named-entity-recognition #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-multilingual #size_categories-1K<n<10K #source_datasets-original #language-Bambara #language-GhomΓ‘lΓ‘' #language-Ewe #language-Fon #language-Hausa #language-Igbo #language-Kinyarwanda #language-Ganda #language-Luo (Kenya and Tanzania) #language-Mossi #language-Nyanja #language-Nigerian Pidgin #language-Shona #language-Swahili (macrolanguage) #language-Tswana #language-Twi #language-Wolof #language-Xhosa #language-Yoruba #language-Zulu #license-afl-3.0 #pos #masakhapos #masakhane #region-us \n", "### Dataset Summary\n\n\nMasakhaPOS is the largest publicly available high-quality dataset for part-of-speech (POS) tagging in 20 African languages. The languages covered are:\n\n\nThe train/validation/test sets are available for all the 20 languages.\n\n\nFor more details see URL", "### Supported Tasks and Leaderboards\n\n\n* 'Part-of-speech': The performance in this task is measured with accuracy (higher is better).", "### Languages\n\n\nThere are 20 languages available :\n\n\n* Bambara (bam)\n* Ghomala (bbj)\n* Ewe (ewe)\n* Fon (fon)\n* Hausa (hau)\n* Igbo (ibo)\n* Kinyarwanda (kin)\n* Luganda (lug)\n* Dholuo (luo)\n* Mossi (mos)\n* Chichewa (nya)\n* Nigerian Pidgin\n* chShona (sna)\n* Kiswahili (swΔ…)\n* Setswana (tsn)\n* Twi (twi)\n* Wolof (wol)\n* isiXhosa (xho)\n* YorΓΉbΓ‘ (yor)\n* isiZulu (zul)\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nThe examples look like this for YorΓΉbΓ‘:", "### Data Fields\n\n\n* 'id': id of the sample\n* 'tokens': the tokens of the example text\n* 'upos': the POS tags of each token\n\n\nThe POS tags correspond to this list:\n\n\nThe definition of the tags can be found on UD website", "### Data Splits\n\n\nFor all languages, there are three splits.\n\n\nThe original splits were named 'train', 'dev' and 'test' and they correspond to the 'train', 'validation' and 'test' splits.\n\n\nThe splits have the following sizes :\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset was introduced to introduce new resources to 20 languages that were under-served for natural language processing.", "### Source Data\n\n\nThe source of the data is from the news domain, details can be found here URL", "#### Initial Data Collection and Normalization\n\n\nThe articles were word-tokenized, information on the exact pre-processing pipeline is unavailable.", "#### Who are the source language producers?\n\n\nThe source language was produced by journalists and writers employed by the news agency and newspaper mentioned above.", "### Annotations", "#### Annotation process\n\n\nDetails can be found here URL", "#### Who are the annotators?\n\n\nAnnotators were recruited from Masakhane", "### Personal and Sensitive Information\n\n\nThe data is sourced from newspaper source and only contains mentions of public figures or individuals\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nUsers should keep in mind that the dataset only contains news text, which might limit the applicability of the developed systems to other domains.\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information\n\n\nThe licensing status of the data is CC 4.0 Non-Commercial\n\n\nProvide the BibTex-formatted reference for the dataset. 
For example:", "### Contributions\n\n\nThanks to @dadelani for adding this dataset." ]
cff4229bbc40b360fd2a5ec5219b939e2dee6131
# Dataset Card for "Wish-QA-ASQA-Llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-ASQA-Llama
[ "region:us" ]
2024-02-05T11:12:56+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "float64"}, {"name": "old_question", "dtype": "string"}, {"name": "old_answer", "dtype": "string"}, {"name": "passage_1", "dtype": "string"}, {"name": "passage_2", "dtype": "string"}, {"name": "passage_3", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 17679459, "num_examples": 3460}], "download_size": 10640928, "dataset_size": 17679459}}
2024-02-05T11:13:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-ASQA-Llama" More Information needed
[ "# Dataset Card for \"Wish-QA-ASQA-Llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-ASQA-Llama\"\n\nMore Information needed" ]
57c73ec74f824c1e23b3c6519f0d68ca5be761b8
# Dataset Card for "Wish-QA-ELI5-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-ELI5-Falcon
[ "region:us" ]
2024-02-05T11:13:04+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "float64"}, {"name": "id", "dtype": "string"}, {"name": "title_question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "passage_1", "dtype": "string"}, {"name": "passage_2", "dtype": "string"}, {"name": "passage_3", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "old_question", "dtype": "string"}, {"name": "old_answer", "dtype": "string"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}, {"name": "input", "dtype": "string"}, {"name": "output 0 answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 53884366, "num_examples": 10000}], "download_size": 32253671, "dataset_size": 53884366}}
2024-02-05T11:13:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-ELI5-Falcon" More Information needed
[ "# Dataset Card for \"Wish-QA-ELI5-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-ELI5-Falcon\"\n\nMore Information needed" ]
d54afb54db580fe9dc9e501da0b20e31ff60209c
# Dataset Card for "Wish-QA-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-Falcon
[ "region:us" ]
2024-02-05T11:13:23+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "document_id", "dtype": "string"}, {"name": "document_url", "dtype": "string"}, {"name": "passage_id", "dtype": "int64"}, {"name": "passage_title", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}, {"name": "odd_question", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 17858882, "num_examples": 10791}], "download_size": 9299253, "dataset_size": 17858882}}
2024-02-05T11:13:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-Falcon" More Information needed
[ "# Dataset Card for \"Wish-QA-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-Falcon\"\n\nMore Information needed" ]
f958140cd4bb047e416ab52e98d90dccce712a95
# Dataset Card for "Wish-Summarization-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-Summarization-Falcon
[ "region:us" ]
2024-02-05T11:13:31+00:00
{"dataset_info": {"features": [{"name": "idx", "dtype": "int64"}, {"name": "text", "dtype": "string"}, {"name": "highlights", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_0.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 34604198, "num_examples": 10000}], "download_size": 21127302, "dataset_size": 34604198}}
2024-02-05T11:13:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-Summarization-Falcon" More Information needed
[ "# Dataset Card for \"Wish-Summarization-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-Summarization-Falcon\"\n\nMore Information needed" ]
8c253ada90a94ecb3dc8c0fbf4dcc9f372f3e6ae
# Dataset Card for "Wish-QA-ELI5-Llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-ELI5-Llama
[ "region:us" ]
2024-02-05T11:13:45+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "int64"}, {"name": "id", "dtype": "string"}, {"name": "old_question", "dtype": "string"}, {"name": "old_answer", "dtype": "string"}, {"name": "passage_1", "dtype": "string"}, {"name": "passage_2", "dtype": "string"}, {"name": "passage_3", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 49631519, "num_examples": 8413}], "download_size": 29992504, "dataset_size": 49631519}}
2024-02-05T11:14:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-ELI5-Llama" More Information needed
[ "# Dataset Card for \"Wish-QA-ELI5-Llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-ELI5-Llama\"\n\nMore Information needed" ]
49e57d0e34016c7ceaa4877c90736fe713b3bb02
# Dataset Card for "Wish-QA-MED-Llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-MED-Llama
[ "region:us" ]
2024-02-05T11:14:04+00:00
{"dataset_info": {"features": [{"name": "pubid", "dtype": "int64"}, {"name": "title_question", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "long_answer", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 52697515, "num_examples": 10000}], "download_size": 27722168, "dataset_size": 52697515}}
2024-02-05T11:14:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-MED-Llama" More Information needed
[ "# Dataset Card for \"Wish-QA-MED-Llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-MED-Llama\"\n\nMore Information needed" ]
584354e311ed0d68a2b48eea27615281fae05974
# Dataset Card for "Wish-QA-MED-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-MED-Falcon
[ "region:us" ]
2024-02-05T11:14:25+00:00
{"dataset_info": {"features": [{"name": "pubid", "dtype": "int64"}, {"name": "title_question", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "long_answer", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 49265046, "num_examples": 10000}], "download_size": 26044954, "dataset_size": 49265046}}
2024-02-05T11:14:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-MED-Falcon" More Information needed
[ "# Dataset Card for \"Wish-QA-MED-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-MED-Falcon\"\n\nMore Information needed" ]
c000cdce9560e2761a707ab2686360183aa8b7c3
# Dataset Card for "Wish-IE-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-IE-Falcon
[ "region:us" ]
2024-02-05T11:14:44+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "int64"}, {"name": "id", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "document", "dtype": "string"}, {"name": "original_question", "dtype": "string"}, {"name": "long_answers", "dtype": "string"}, {"name": "short_answers", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "int64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 51645994, "num_examples": 1000}], "download_size": 26325594, "dataset_size": 51645994}}
2024-02-05T11:14:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-IE-Falcon" More Information needed
[ "# Dataset Card for \"Wish-IE-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-IE-Falcon\"\n\nMore Information needed" ]
c008ccb87369becb8ec4fe24784f413504df2a26
# Dataset Card for "Wish-QA-ASQA-Falcon" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-ASQA-Falcon
[ "region:us" ]
2024-02-05T11:15:02+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "old_question", "dtype": "string"}, {"name": "old_answer", "dtype": "string"}, {"name": "passage_1", "dtype": "string"}, {"name": "passage_2", "dtype": "string"}, {"name": "passage_3", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}, {"name": "input", "dtype": "string"}, {"name": "output 0 answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 23433520, "num_examples": 4354}], "download_size": 14082055, "dataset_size": 23433520}}
2024-02-05T11:15:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-ASQA-Falcon" More Information needed
[ "# Dataset Card for \"Wish-QA-ASQA-Falcon\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-ASQA-Falcon\"\n\nMore Information needed" ]
cd639f543ee8a732139b3cb8bb1d835b0b622c40
# Dataset Card for "Wish-QA-NQ-Llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibm/Wish-QA-NQ-Llama
[ "region:us" ]
2024-02-05T11:15:24+00:00
{"dataset_info": {"features": [{"name": "Unnamed: 0", "dtype": "int64"}, {"name": "id", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "document", "dtype": "string"}, {"name": "original_question", "dtype": "string"}, {"name": "long_answers", "dtype": "string"}, {"name": "short_answers", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "qa", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "doc_score", "dtype": "float64"}, {"name": "score_qa", "dtype": "float64"}, {"name": "ans_num_words", "dtype": "int64"}, {"name": "text_num_words", "dtype": "int64"}, {"name": "text_longer_1.5", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 472486869, "num_examples": 10000}], "download_size": 250060927, "dataset_size": 472486869}}
2024-02-05T11:17:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wish-QA-NQ-Llama" More Information needed
[ "# Dataset Card for \"Wish-QA-NQ-Llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wish-QA-NQ-Llama\"\n\nMore Information needed" ]
fc19b32ef4e07fb5255a7527da3eb6dc18d592e4
# Dataset Card for "high_vs_random_min_length_500" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
kristmh/high_vs_random_min_length_500
[ "region:us" ]
2024-02-05T11:15:42+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validate", "path": "data/validate-*"}]}], "dataset_info": {"features": [{"name": "text_clean", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "test", "num_bytes": 26129807, "num_examples": 15895}, {"name": "train", "num_bytes": 212631694, "num_examples": 127154}, {"name": "validate", "num_bytes": 25999306, "num_examples": 15894}], "download_size": 123342839, "dataset_size": 264760807}}
2024-02-05T11:16:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "high_vs_random_min_length_500" More Information needed
[ "# Dataset Card for \"high_vs_random_min_length_500\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"high_vs_random_min_length_500\"\n\nMore Information needed" ]
16755b11ff2765cc9f2b9d3b619278e1e30d868a
This contains papers and different forms

 2073  git submodule add https://github.com/ppwwyyxx/SoPaper
 2074  cd SoPaper/
 2075  ls
 2076  pip install .
 2077  sopaper
 2078  sopaper unimath
 2079  ls
 2080  mkdir data
 2081  mv Unimath.pdf data/
 2082  sopaper unimath --help
 2083  pdftotext data/Unimath.pdf
 2084  cd data/
 2085  git init
 2086  git add Unimath.*
 2087  git commit -m 'baseline'
 2088  pandoc Unimath.pdf Unimath.org
 2089  pdftohtml Unimath.pdf
 2090  ls -ltar
 2091  pandoc Unimath.html Unimath.org
 2092  pandoc Unimath.html -o Unimath.org
 2093  pandoc Unimath.html -O Unimath.org
 2094  pandoc --help
 2095  pandoc Unimath.html --to org
 2096  ls -latr
 2097  pandoc Unimaths.html --to org
 2098  pandoc Unimaths.html --to org >Unimath.org
 2099  pandoc Unimaths.html --to md
 2100  pandoc Unimaths.html --to markdown
 2101  git add Unimath.org
 2102  git commit -m 'base' -a
 2103  git remote add https://huggingface.co/datasets/introspector/papers
 2104  git remote add origin https://huggingface.co/datasets/introspector/papers
 2105  git add *
 2106  git commit -m 'paper step1' -a
 2107  git push
 2108  git pull
 2109  git config pull.rebase true # rebase
 2110  git pull
 2111  git commit -m 'merge' -a
 2112  git push
 2113  cp ~/.gitignore_templates/Emacs.gitignore .gitingnore
 2114  cp ~/.gitignore_templates/Emacs.gitignore .gitignore
 2115  git status
 2116  git add .gitignore
 2117  git commit -m 'clean' -a
 2118  ls
 2119  mkdir -p 2016/09/27/Heidelberg/HLF2015/Unimath
 2120  mv Unimath* 2016/09/27/Heidelberg/HLF2015/Unimath/
 2121  git status
 2122  git add 2016
 2123  git commit -m 'moving' -a
 2124  git push
 2125  mv 2016/09/27/Heidelberg/HLF2015 016/09/27/Heidelberg/HLF2016
 2126  git add 2016/
 2127  git commit -m 'move' -a
 2128  git push
 2129  history
introspector/papers
[ "license:creativeml-openrail-m", "region:us" ]
2024-02-05T11:22:17+00:00
{"license": "creativeml-openrail-m"}
2024-02-05T14:38:03+00:00
[]
[]
TAGS #license-creativeml-openrail-m #region-us
This contains papers and different forms 2073 git submodule add URL 2074 cd SoPaper/ 2075 ls 2076 pip install . 2077 sopaper 2078 sopaper unimath 2079 ls 2080 mkdir data 2081 mv URL data/ 2082 sopaper unimath --help 2083 pdftotext data/URL 2084 cd data/ 2085 git init 2086 git add Unimath.* 2087 git commit -m 'baseline' 2088 pandoc URL URL 2089 pdftohtml URL 2090 ls -ltar 2091 pandoc URL URL 2092 pandoc URL -o URL 2093 pandoc URL -O URL 2094 pandoc --help 2095 pandoc URL --to org 2096 ls -latr 2097 pandoc URL --to org 2098 pandoc URL --to org >URL 2099 pandoc URL --to md 2100 pandoc URL --to markdown 2101 git add URL 2102 git commit -m 'base' -a 2103 git remote add URL 2104 git remote add origin URL 2105 git add * 2106 git commit -m 'paper step1' -a 2107 git push 2108 git pull 2109 git config URL true # rebase 2110 git pull 2111 git commit -m 'merge' -a 2112 git push 2113 cp ~/.gitignore_templates/Emacs.gitignore .gitingnore 2114 cp ~/.gitignore_templates/Emacs.gitignore .gitignore 2115 git status 2116 git add .gitignore 2117 git commit -m 'clean' -a 2118 ls 2119 mkdir -p 2016/09/27/Heidelberg/HLF2015/Unimath 2120 mv Unimath* 2016/09/27/Heidelberg/HLF2015/Unimath/ 2121 git status 2122 git add 2016 2123 git commit -m 'moving' -a 2124 git push 2125 mv 2016/09/27/Heidelberg/HLF2015 016/09/27/Heidelberg/HLF2016 2126 git add 2016/ 2127 git commit -m 'move' -a 2128 git push 2129 history
[ "# rebase\n 2110 git pull\n 2111 git commit -m 'merge' -a\n 2112 git push\n 2113 cp ~/.gitignore_templates/Emacs.gitignore .gitingnore\n 2114 cp ~/.gitignore_templates/Emacs.gitignore .gitignore\n 2115 git status\n 2116 git add .gitignore \n 2117 git commit -m 'clean' -a\n 2118 ls\n 2119 mkdir -p 2016/09/27/Heidelberg/HLF2015/Unimath\n 2120 mv Unimath* 2016/09/27/Heidelberg/HLF2015/Unimath/\n 2121 git status\n 2122 git add 2016\n 2123 git commit -m 'moving' -a\n 2124 git push\n 2125 mv 2016/09/27/Heidelberg/HLF2015 016/09/27/Heidelberg/HLF2016\n 2126 git add 2016/\n 2127 git commit -m 'move' -a\n 2128 git push\n 2129 history" ]
[ "TAGS\n#license-creativeml-openrail-m #region-us \n", "# rebase\n 2110 git pull\n 2111 git commit -m 'merge' -a\n 2112 git push\n 2113 cp ~/.gitignore_templates/Emacs.gitignore .gitingnore\n 2114 cp ~/.gitignore_templates/Emacs.gitignore .gitignore\n 2115 git status\n 2116 git add .gitignore \n 2117 git commit -m 'clean' -a\n 2118 ls\n 2119 mkdir -p 2016/09/27/Heidelberg/HLF2015/Unimath\n 2120 mv Unimath* 2016/09/27/Heidelberg/HLF2015/Unimath/\n 2121 git status\n 2122 git add 2016\n 2123 git commit -m 'moving' -a\n 2124 git push\n 2125 mv 2016/09/27/Heidelberg/HLF2015 016/09/27/Heidelberg/HLF2016\n 2126 git add 2016/\n 2127 git commit -m 'move' -a\n 2128 git push\n 2129 history" ]
27aec47071ce6615332f5dfb1ed2218251a0d7bc
# RepoBench v1.1 (Python)

## Introduction

This dataset presents the **Python** portion of [RepoBench](https://arxiv.org/abs/2306.03091) v1.1 (ICLR 2024). The data encompasses a collection from GitHub, spanning the period from **October 6th to November 31st, 2023**. With a commitment to data integrity, we've implemented a deduplication process based on file content against the Stack v2 dataset (coming soon), aiming to mitigate data leakage and memorization concerns.

## Resources and Links

- [Paper](https://arxiv.org/abs/2306.03091)
- [GitHub](https://github.com/Leolty/repobench)
- [Dataset Introduction](https://github.com/Leolty/repobench/blob/main/data/README.md)

## FAQs

- **Q:** What do the features in the dataset mean? **A:** Imagine you're coding in Python and you want to write the next line of your code. The dataset provides you with the following information:
  - `repo_name` (string): the name of the repository
  - `file_path` (string): the path of the current file
  - `context` (list): the cross-file code snippets that might be helpful for writing the next line:
    - `identifier` (string): the identifier of the code snippet
    - `path` (string): the path of the code snippet
    - `snippet` (string): the code snippet
  - `import_statement` (string): the import statement of the current file
  - `cropped_code` (string): the cropped code of the current file (up to previous 120 lines)
  - `all_code` (string): the entire code of the current file (not cropped)
  - `next_line` (string): the next line of the code (this serves as the target)
  - `gold_snippet_index` (int): the index of the gold snippet in the context (which will be used in the next line; provided just for reference, you should not use this for next line prediction)
  - `created_at` (string): the creation time of the repository
  - `level` (string): the level of next line completion, which is measured by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some necessary separator tokens)

- **Q:** How is the level defined? **A:** The level is determined by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some necessary separator tokens). The token number is calculated with the tokenizer of GPT-4 by using [tiktoken](https://github.com/openai/tiktoken). The following table shows the level definition:

  | Level | Prompt Length (Number of Tokens) |
  |-------|----------------------------------|
  | 2k    | 640 - 1,600                      |
  | 4k    | 1,600 - 3,600                    |
  | 8k    | 3,600 - 7,200                    |
  | 12k   | 7,200 - 10,800                   |
  | 16k   | 10,800 - 14,400                  |
  | 24k   | 14,400 - 21,600                  |
  | 32k   | 21,600 - 28,800                  |
  | 64k   | 28,800 - 57,600                  |
  | 128k  | 57,600 - 100,000                 |
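As a quick illustration of how a constructed prompt maps onto one of these levels, the sketch below counts tokens with tiktoken. It assumes GPT-4's `cl100k_base` encoding and treats the ranges in the table as half-open intervals; the bucketing here is an illustration only, not the authors' official code.

```python
import tiktoken

# Level boundaries copied from the table above (lower bound inclusive,
# upper bound exclusive -- an assumption about boundary handling).
LEVELS = [
    ("2k", 640, 1_600), ("4k", 1_600, 3_600), ("8k", 3_600, 7_200),
    ("12k", 7_200, 10_800), ("16k", 10_800, 14_400), ("24k", 14_400, 21_600),
    ("32k", 21_600, 28_800), ("64k", 28_800, 57_600), ("128k", 57_600, 100_000),
]

def prompt_level(prompt: str):
    """Return the level name for a fully constructed prompt, or None if it falls outside all ranges."""
    encoding = tiktoken.get_encoding("cl100k_base")  # GPT-4's encoding in tiktoken
    n_tokens = len(encoding.encode(prompt))
    for name, low, high in LEVELS:
        if low <= n_tokens < high:
            return name
    return None
```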
- **Q:** What do the different splits mean? **A:** The dataset is split into three parts:
  - `cross_file_first`: the next line of code utilizes content from a cross-file code snippet and it is its first usage within the current file.
  - `cross_file_random`: the next line of code utilizes content from a cross-file code snippet and it is NOT its first usage within the current file.
  - `in_file`: the next line of code does not utilize content from a cross-file code snippet.

- **Q:** How to construct the prompt for next line prediction? **A:** We hereby provide the official implementation for constructing prompts. Please note that the methods described below are not necessarily the optimal way of construction. Reordering, retrieval augmentation, or employing different cropping/construction techniques could potentially lead to varying degrees of improvement. Ensure that your model evaluations are conducted in a fair manner.

```python
import re

def construct_prompt(
    data: dict,
    language: str = "python",
    tokenizer=None,
    max_token_nums: int = 15800
) -> str:
    """
    Construct the prompt for next line prediction.

    :param data: data point from the dataset
    :param language: the language of the code
    :param tokenizer: the tokenizer of the evaluation model
    :param max_token_nums: the maximum number of tokens constraint for the prompt

    :return: the constructed prompt
    """
    # comment symbol for different languages
    comment_symbol = "#" if language == "python" else "//"

    # construct the cross-file prompt and in-file prompt separately
    # cross-file prompt
    cross_file_prompt = f"{comment_symbol} Repo Name: {data['repo_name']}\n"

    for snippet in data['context']:
        cross_file_prompt += f"{comment_symbol} Path: {snippet['path']}\n{snippet['snippet']}" + "\n\n"

    # in-file prompt
    in_file_prompt = f"{comment_symbol} Path: {data['file_path']}\n{data['import_statement']}\n{data['cropped_code']}\n"

    # if we assign the tokenizer and the max_token_nums, we will truncate the cross-file prompt to meet the constraint
    if tokenizer is not None and max_token_nums is not None:
        cross_file_prompt_token_nums = len(tokenizer.encode(cross_file_prompt))
        in_file_prompt_token_nums = len(tokenizer.encode(in_file_prompt))

        exceed_token_nums = cross_file_prompt_token_nums + in_file_prompt_token_nums - max_token_nums

        if exceed_token_nums > 0:
            # split the cross-file prompt into lines
            cross_file_prompt_lines = cross_file_prompt.split("\n")
            # drop lines from the end until the exceeding token number is less than 0
            for i in range(len(cross_file_prompt_lines) - 1, -1, -1):
                exceed_token_nums -= len(tokenizer.encode(cross_file_prompt_lines[i]))
                if exceed_token_nums < 0:
                    break

            # join the lines back
            cross_file_prompt = "\n".join(cross_file_prompt_lines[:i + 1]) + "\n\n"

    # combine the cross-file prompt and in-file prompt
    prompt = cross_file_prompt + in_file_prompt

    # normalize some empty lines
    prompt = re.sub(r'\n{4,}', '\n\n', prompt)

    return prompt
```

- **Q:** How to load the dataset? **A:** You can simply use the following code to load the dataset:

```python
from datasets import load_dataset

dataset = load_dataset("tianyang/repobench_python_v1.1")
```

To construct the prompt for next line prediction, you can refer to the official implementation provided in the previous question and use the `construct_prompt` function to construct the prompt, for example:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/deepseek-coder-1.3b-base")
model = AutoModelForCausalLM.from_pretrained("deepseek-ai/deepseek-coder-1.3b-base")

prompt = construct_prompt(dataset['cross_file_first'][0], tokenizer=tokenizer, max_token_nums=15800)
```

- **Q:** How often will the dataset be updated? **A:** We plan to update the dataset every three months, but there might be slight delays considering the time required for data crawling and our own schedules. If you require updated data, please feel free to contact us, and we can coordinate the timing and expedite the process.

- **Q:** What models should I use to evaluate the dataset? **A:** RepoBench is designed to evaluate base models, not those that have been instruction fine-tuned. Please use base models for evaluation.
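To make the evaluation setup concrete, here is a minimal sketch of a next-line prediction loop that reuses the `dataset`, `tokenizer`, `model`, and `construct_prompt` objects from the snippets above. Greedy decoding and exact match on the first generated line are illustrative choices for a quick sanity check, not necessarily the official RepoBench scoring.

```python
import torch

def predict_next_line(model, tokenizer, prompt: str, max_new_tokens: int = 64) -> str:
    """Greedily generate a completion and return its first non-empty line."""
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        output = model.generate(**inputs, max_new_tokens=max_new_tokens, do_sample=False)
    completion = tokenizer.decode(output[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)
    for line in completion.split("\n"):
        if line.strip():
            return line
    return ""

# Small demo subset; a real evaluation would cover the full splits.
samples = [dataset["cross_file_first"][i] for i in range(10)]
correct = 0
for sample in samples:
    prompt = construct_prompt(sample, tokenizer=tokenizer, max_token_nums=15800)
    prediction = predict_next_line(model, tokenizer, prompt)
    correct += int(prediction.strip() == sample["next_line"].strip())

print(f"Exact match on the demo subset: {correct / len(samples):.2%}")
```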
- **Q:** I am training a new model but the knowledge cutoff date is after the dataset's. Can you provide me with the latest data? **A:** Sure! We are happy to provide you with the latest data (even customized data with specific requirements). Please feel free to contact us.

- **Q:** Can I opt-out? **A:** Yes, you can opt your repository out of the dataset. Please check [Am I in RepoBench?](https://huggingface.co/spaces/tianyang/in-the-repobench); we will upload the raw data of the repository information we crawled at least 15 days before the dataset creation and release. We will respect your decision and remove your repository from the dataset if you opt-out.

## Citation

If you find RepoBench useful in your research, please consider citing the paper using the following BibTeX entry:

```bibtex
@misc{liu2023repobench,
    title={RepoBench: Benchmarking Repository-Level Code Auto-Completion Systems},
    author={Tianyang Liu and Canwen Xu and Julian McAuley},
    year={2024},
    url={https://arxiv.org/abs/2306.03091},
    booktitle={International Conference on Learning Representations}
}
```

Your interest and contributions to RepoBench are immensely valued. Happy coding! πŸš€
tianyang/repobench_python_v1.1
[ "task_categories:text-generation", "language:en", "license:cc", "code", "arxiv:2306.03091", "region:us" ]
2024-02-05T11:41:48+00:00
{"language": ["en"], "license": "cc", "task_categories": ["text-generation"], "configs": [{"config_name": "default", "data_files": [{"split": "cross_file_first", "path": "data/cross_file_first-*"}, {"split": "cross_file_random", "path": "data/cross_file_random-*"}, {"split": "in_file", "path": "data/in_file-*"}]}], "dataset_info": {"features": [{"name": "repo_name", "dtype": "string"}, {"name": "file_path", "dtype": "string"}, {"name": "context", "list": [{"name": "identifier", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "snippet", "dtype": "string"}]}, {"name": "import_statement", "dtype": "string"}, {"name": "token_num", "dtype": "int64"}, {"name": "cropped_code", "dtype": "string"}, {"name": "all_code", "dtype": "string"}, {"name": "next_line", "dtype": "string"}, {"name": "gold_snippet_index", "dtype": "int64"}, {"name": "created_at", "dtype": "string"}, {"name": "level", "dtype": "string"}], "splits": [{"name": "cross_file_first", "num_bytes": 504528431, "num_examples": 8033}, {"name": "cross_file_random", "num_bytes": 467242455, "num_examples": 7618}, {"name": "in_file", "num_bytes": 488999100, "num_examples": 7910}], "download_size": 472994299, "dataset_size": 1460769986}, "tags": ["code"]}
2024-02-14T21:28:28+00:00
[ "2306.03091" ]
[ "en" ]
TAGS #task_categories-text-generation #language-English #license-cc #code #arxiv-2306.03091 #region-us
RepoBench v1.1 (Python) ======================= Introduction ------------ This dataset presents the Python portion of RepoBench v1.1 (ICLR 2024). The data encompasses a collection from GitHub, spanning the period from October 6th to November 31st, 2023. With a commitment to data integrity, we've implemented a deduplication process based on file content against the Stack v2 dataset (coming soon), aiming to mitigate data leakage and memorization concerns. Resources and Links ------------------- * Paper * GitHub * Dataset Introduction FAQs ---- * Q: What do the features in the dataset mean? A: Imagine you're coding in Python and you want to write the next line of your code. The dataset provides you the following information: + 'repo\_name' (string): the name of the repository + 'file\_path' (string): the path of the current file + 'context' (list): the cross-file code snippets that might be helpful for writing the next line: - 'identifier' (string): the identifier of the code snippet - 'path' (string): the path of the code snippet - 'snippet' (string): the code snippet + 'import\_statement' (string): the import statement of the current file + 'cropped\_code' (string): the cropped code of the current file (up to previous 120 lines) + 'all\_code' (string): the entire code of the current file (not cropped) + 'next\_line' (string): the next line of the code (this serves as the target) + 'gold\_snippet\_index' (int): the index of the gold snippet in the context (which will be used in next line, just for reference, you should not use this for next line prediction) + 'created\_at' (string): the creation time of the repository + 'level' (string): the level of next line completion, which is measured by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some neccessary separator tokens) * Q: How does the level be defined? A: The level is determined by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some neccessary separator tokens). The token number is calculated by the tokenizer of GPT-4 by using tiktoken. The following table shows the level definition: * Q: What does the different splits mean? A: The dataset is split into three parts: + 'cross\_file\_first': the next line of code utilizes content from a cross-file code snippet and it is its first usage within current file. + 'cross\_file\_random': the next line of code utilizes content from a cross-file code snippet and it is NOT its first usage within current file. + 'in\_file': the next line of code does not utilize content from a cross-file code snippet. * Q: How to construct the prompt for next line prediction? A: We hereby provide the official implementation for constructing prompts. Please note that the methods described below are not necessarily the optimal way of construction. Reordering, retrieval argumentation, or employing different cropping/construction techniques could potentially lead to varying degrees of improvement. Ensure that your model evaluations are conducted in a fair manner. * Q: How to load the dataset? A: You can simply use the following code to load the dataset: To construct the prompt for next line prediction, you can refer to the official implementation provided in the previous question and use the 'construct\_prompt' function to construct the prompt, for example: * Q: How often will the dataset be updated? 
A: We plan to update the dataset every three months, but there might be slight delays considering the time required for data crawling and our own schedules. If you require updated data, please feel free to contact us, and we can coordinate the timing and expedite the process. * Q: What models should I use to evaluate the dataset? A: RepoBench is designed to evaluate base models, not those that have been instruction fine-tuned. Please use base models for evaluation. * Q: I am training a new model but the knowledge cutoff date is after the dataset's. Can you provide me with the latest data? A: Sure! We are happy to provide you with the latest data (even customized data with specific requirements). Please feel free to contact us. * Q: Can I opt-out? A: Yes, you can opt-out your repository from the dataset. Please check Am I in RepoBench?, we will upload the raw data of the repository information we crawled at least 15 days before the dataset creation and release. We will respect your decision and remove your repository from the dataset if you opt-out. If you find RepoBench useful in your research, please consider citing the paper using the following BibTeX entry: Your interest and contributions to RepoBench are immensely valued. Happy coding!
[]
[ "TAGS\n#task_categories-text-generation #language-English #license-cc #code #arxiv-2306.03091 #region-us \n" ]
fbc70c60cee4f4dde02e140e1e9144d02c349269
# Dataset Card for Evaluation run of KnutJaegersberg/Deita-20b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [KnutJaegersberg/Deita-20b](https://huggingface.co/KnutJaegersberg/Deita-20b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__Deita-20b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T12:16:25.639871](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Deita-20b/blob/main/results_2024-02-05T12-16-25.639871.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6757240714584833, "acc_stderr": 0.03164629743117511, "acc_norm": 0.6760877791494632, "acc_norm_stderr": 0.03231492887299232, "mc1": 0.41370869033047736, "mc1_stderr": 0.0172408618120998, "mc2": 0.572881968590399, "mc2_stderr": 0.015288640690271185 }, "harness|arc:challenge|25": { "acc": 0.6015358361774744, "acc_stderr": 0.014306946052735562, "acc_norm": 0.6390784982935154, "acc_norm_stderr": 0.014034761386175456 }, "harness|hellaswag|10": { "acc": 0.6315475004979088, "acc_stderr": 0.004813991069808273, "acc_norm": 0.8311093407687712, "acc_norm_stderr": 0.003738896244953813 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8092105263157895, "acc_stderr": 0.031975658210325, "acc_norm": 0.8092105263157895, "acc_norm_stderr": 0.031975658210325 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.71, "acc_stderr": 0.04560480215720683, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7396226415094339, "acc_stderr": 0.027008766090708052, "acc_norm": 0.7396226415094339, "acc_norm_stderr": 0.027008766090708052 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8055555555555556, "acc_stderr": 0.03309615177059005, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.03309615177059005 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.45098039215686275, "acc_stderr": 0.049512182523962625, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.049512182523962625 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6595744680851063, "acc_stderr": 0.030976692998534432, "acc_norm": 0.6595744680851063, "acc_norm_stderr": 0.030976692998534432 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6206896551724138, "acc_stderr": 0.040434618619167466, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.040434618619167466 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5105820105820106, "acc_stderr": 0.02574554227604549, "acc_norm": 0.5105820105820106, "acc_norm_stderr": 0.02574554227604549 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8290322580645161, "acc_stderr": 0.02141724293632159, "acc_norm": 0.8290322580645161, "acc_norm_stderr": 0.02141724293632159 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5911330049261084, "acc_stderr": 0.034590588158832314, "acc_norm": 0.5911330049261084, "acc_norm_stderr": 0.034590588158832314 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047712, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047712 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758723, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758723 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6794871794871795, "acc_stderr": 0.02366129639396428, "acc_norm": 0.6794871794871795, "acc_norm_stderr": 0.02366129639396428 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3814814814814815, "acc_stderr": 0.0296167189274976, "acc_norm": 0.3814814814814815, "acc_norm_stderr": 0.0296167189274976 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7352941176470589, "acc_stderr": 0.028657491285071952, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.028657491285071952 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4105960264900662, "acc_stderr": 
0.04016689594849927, "acc_norm": 0.4105960264900662, "acc_norm_stderr": 0.04016689594849927 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.015173141845126255, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.015173141845126255 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.03395322726375798, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.03395322726375798 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.02485747808025045, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.02485747808025045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.023094329582595698, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.023094329582595698 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7040358744394619, "acc_stderr": 0.03063659134869982, "acc_norm": 0.7040358744394619, "acc_norm_stderr": 0.03063659134869982 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6946564885496184, "acc_stderr": 0.04039314978724562, "acc_norm": 0.6946564885496184, "acc_norm_stderr": 0.04039314978724562 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.035208939510976534, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.035208939510976534 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.04726835553719099, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.04726835553719099 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8122605363984674, "acc_stderr": 0.013964393769899115, "acc_norm": 0.8122605363984674, "acc_norm_stderr": 0.013964393769899115 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.02378620325550829, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.02378620325550829 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37094972067039106, "acc_stderr": 0.01615591072134177, "acc_norm": 0.37094972067039106, "acc_norm_stderr": 0.01615591072134177 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.024630048979824775, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.024630048979824775 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.77491961414791, "acc_stderr": 0.023720088516179027, "acc_norm": 0.77491961414791, "acc_norm_stderr": 0.023720088516179027 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.46808510638297873, "acc_stderr": 0.029766675075873862, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873862 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4973924380704042, "acc_stderr": 0.012770062445433175, "acc_norm": 0.4973924380704042, "acc_norm_stderr": 0.012770062445433175 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7022058823529411, "acc_stderr": 0.027778298701545436, "acc_norm": 0.7022058823529411, "acc_norm_stderr": 0.027778298701545436 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6879084967320261, "acc_stderr": 0.01874501120127766, "acc_norm": 0.6879084967320261, "acc_norm_stderr": 0.01874501120127766 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.03061111655743253, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.03061111655743253 }, "harness|truthfulqa:mc|0": { "mc1": 0.41370869033047736, "mc1_stderr": 0.0172408618120998, "mc2": 0.572881968590399, "mc2_stderr": 0.015288640690271185 }, "harness|winogrande|5": { "acc": 0.846093133385951, "acc_stderr": 0.01014194452375004 }, "harness|gsm8k|5": { "acc": 0.7210007581501138, "acc_stderr": 0.012354115779970311 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_KnutJaegersberg__Deita-20b
[ "region:us" ]
2024-02-05T12:18:33+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/Deita-20b", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/Deita-20b](https://huggingface.co/KnutJaegersberg/Deita-20b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__Deita-20b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T12:16:25.639871](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Deita-20b/blob/main/results_2024-02-05T12-16-25.639871.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6757240714584833,\n \"acc_stderr\": 0.03164629743117511,\n \"acc_norm\": 0.6760877791494632,\n \"acc_norm_stderr\": 0.03231492887299232,\n \"mc1\": 0.41370869033047736,\n \"mc1_stderr\": 0.0172408618120998,\n \"mc2\": 0.572881968590399,\n \"mc2_stderr\": 0.015288640690271185\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6015358361774744,\n \"acc_stderr\": 0.014306946052735562,\n \"acc_norm\": 0.6390784982935154,\n \"acc_norm_stderr\": 0.014034761386175456\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6315475004979088,\n \"acc_stderr\": 0.004813991069808273,\n \"acc_norm\": 0.8311093407687712,\n \"acc_norm_stderr\": 0.003738896244953813\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8092105263157895,\n \"acc_stderr\": 0.031975658210325,\n \"acc_norm\": 0.8092105263157895,\n \"acc_norm_stderr\": 0.031975658210325\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7396226415094339,\n \"acc_stderr\": 0.027008766090708052,\n \"acc_norm\": 0.7396226415094339,\n \"acc_norm_stderr\": 0.027008766090708052\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.03309615177059005,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.03309615177059005\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.049512182523962625,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.049512182523962625\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6595744680851063,\n \"acc_stderr\": 0.030976692998534432,\n \"acc_norm\": 0.6595744680851063,\n \"acc_norm_stderr\": 0.030976692998534432\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.040434618619167466,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.040434618619167466\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5105820105820106,\n \"acc_stderr\": 0.02574554227604549,\n \"acc_norm\": 0.5105820105820106,\n \"acc_norm_stderr\": 0.02574554227604549\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8290322580645161,\n \"acc_stderr\": 0.02141724293632159,\n \"acc_norm\": 0.8290322580645161,\n \"acc_norm_stderr\": 0.02141724293632159\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5911330049261084,\n \"acc_stderr\": 0.034590588158832314,\n \"acc_norm\": 0.5911330049261084,\n \"acc_norm_stderr\": 0.034590588158832314\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047712,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047712\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758723,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758723\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6794871794871795,\n \"acc_stderr\": 0.02366129639396428,\n \"acc_norm\": 0.6794871794871795,\n \"acc_norm_stderr\": 0.02366129639396428\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3814814814814815,\n \"acc_stderr\": 0.0296167189274976,\n \"acc_norm\": 0.3814814814814815,\n \"acc_norm_stderr\": 0.0296167189274976\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.028657491285071952,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.028657491285071952\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4105960264900662,\n \"acc_stderr\": 0.04016689594849927,\n \"acc_norm\": 0.4105960264900662,\n \"acc_norm_stderr\": 0.04016689594849927\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.015173141845126255,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.015173141845126255\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.03395322726375798,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.03395322726375798\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02485747808025045,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02485747808025045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.023094329582595698,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.023094329582595698\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.03063659134869982,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.03063659134869982\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6946564885496184,\n \"acc_stderr\": 0.04039314978724562,\n \"acc_norm\": 0.6946564885496184,\n \"acc_norm_stderr\": 0.04039314978724562\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.035208939510976534,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.035208939510976534\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8122605363984674,\n \"acc_stderr\": 0.013964393769899115,\n \"acc_norm\": 0.8122605363984674,\n \"acc_norm_stderr\": 0.013964393769899115\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37094972067039106,\n \"acc_stderr\": 0.01615591072134177,\n \"acc_norm\": 0.37094972067039106,\n \"acc_norm_stderr\": 0.01615591072134177\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824775,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824775\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.77491961414791,\n \"acc_stderr\": 0.023720088516179027,\n \"acc_norm\": 0.77491961414791,\n \"acc_norm_stderr\": 0.023720088516179027\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873862,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873862\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4973924380704042,\n \"acc_stderr\": 0.012770062445433175,\n \"acc_norm\": 0.4973924380704042,\n \"acc_norm_stderr\": 0.012770062445433175\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7022058823529411,\n \"acc_stderr\": 0.027778298701545436,\n \"acc_norm\": 0.7022058823529411,\n \"acc_norm_stderr\": 0.027778298701545436\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6879084967320261,\n \"acc_stderr\": 0.01874501120127766,\n \"acc_norm\": 0.6879084967320261,\n \"acc_norm_stderr\": 0.01874501120127766\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.03061111655743253,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.03061111655743253\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.41370869033047736,\n \"mc1_stderr\": 0.0172408618120998,\n \"mc2\": 0.572881968590399,\n \"mc2_stderr\": 0.015288640690271185\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.846093133385951,\n \"acc_stderr\": 0.01014194452375004\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7210007581501138,\n \"acc_stderr\": 0.012354115779970311\n }\n}\n```", "repo_url": 
"https://huggingface.co/KnutJaegersberg/Deita-20b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|arc:challenge|25_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|gsm8k|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hellaswag|10_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T12-16-25.639871.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T12-16-25.639871.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T12-16-25.639871.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T12-16-25.639871.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T12-16-25.639871.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T12_16_25.639871", "path": ["**/details_harness|winogrande|5_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T12-16-25.639871.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_05T12_16_25.639871", "path": ["results_2024-02-05T12-16-25.639871.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T12-16-25.639871.parquet"]}]}]}
2024-02-05T12:18:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/Deita-20b Dataset automatically created during the evaluation run of model KnutJaegersberg/Deita-20b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T12:16:25.639871 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of KnutJaegersberg/Deita-20b\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Deita-20b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T12:16:25.639871(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/Deita-20b\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Deita-20b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T12:16:25.639871(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
89e4ef3af12fbead0e1fe7aa6217ce490919c7b4
# Dataset Card for Evaluation run of Kquant03/Azathoth-16x7B-bf16 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Kquant03/Azathoth-16x7B-bf16](https://huggingface.co/Kquant03/Azathoth-16x7B-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T13:02:29.525872](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16/blob/main/results_2024-02-05T13-02-29.525872.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6527351745375014, "acc_stderr": 0.03209438096547266, "acc_norm": 0.6516832663399301, "acc_norm_stderr": 0.03277230372243864, "mc1": 0.576499388004896, "mc1_stderr": 0.01729742144853475, "mc2": 0.6960737340136303, "mc2_stderr": 0.01510691757213546 }, "harness|arc:challenge|25": { "acc": 0.7158703071672355, "acc_stderr": 0.013179442447653886, "acc_norm": 0.7380546075085325, "acc_norm_stderr": 0.012849054826858107 }, "harness|hellaswag|10": { "acc": 0.7253535152360088, "acc_stderr": 0.004454237797448359, "acc_norm": 0.8886675960963951, "acc_norm_stderr": 0.003139004815925866 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.032400380867927465, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.032400380867927465 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.04697085136647863, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04697085136647863 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.02530590624159063, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.02530590624159063 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083525, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083525 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8080808080808081, "acc_stderr": 0.028057791672989017, "acc_norm": 0.8080808080808081, "acc_norm_stderr": 0.028057791672989017 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328973, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328973 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092434, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092434 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 0.03407632093854051, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.03407632093854051 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.024857478080250447, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.024857478080250447 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.026750826994676177, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.026750826994676177 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.036412970813137296, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.036412970813137296 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.02093019318517933, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.02093019318517933 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.013702643715368982, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.013702643715368982 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.023703099525258176, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.023703099525258176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4435754189944134, "acc_stderr": 0.016615680401003724, "acc_norm": 0.4435754189944134, "acc_norm_stderr": 0.016615680401003724 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": 
{ "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.012743072942653345, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.012743072942653345 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406762, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406762 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6650326797385621, "acc_stderr": 0.019094228167000328, "acc_norm": 0.6650326797385621, "acc_norm_stderr": 0.019094228167000328 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.576499388004896, "mc1_stderr": 0.01729742144853475, "mc2": 0.6960737340136303, "mc2_stderr": 0.01510691757213546 }, "harness|winogrande|5": { "acc": 0.8547750591949487, "acc_stderr": 0.009902153904760817 }, "harness|gsm8k|5": { "acc": 0.6982562547384382, "acc_stderr": 0.012643544762873358 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
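As a complement to the loading snippet shown earlier in this card, the aggregated metrics can also be read directly from the "results" configuration. The sketch below is a minimal, non-authoritative example: it only assumes the `datasets` library and the configuration and split names listed in this repository's metadata ("results", "harness_gsm8k_5", and the "latest" split); the exact column layout of the parquet files is not documented here, so the code inspects it before use.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16"

# Aggregated metrics of the run live in the "results" configuration;
# the "latest" split always points at the most recent evaluation.
results = load_dataset(REPO, "results", split="latest")

# The column layout is not described in this card, so inspect it first.
print(results.column_names)
print(results[0])

# Per-task details use the config names listed in the metadata, e.g. GSM8K:
gsm8k_details = load_dataset(REPO, "harness_gsm8k_5", split="latest")
print(len(gsm8k_details))
```

Using split="latest" instead of the timestamped split keeps the snippet valid if the model is re-evaluated and a newer run is added to the repository.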
open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16
[ "region:us" ]
2024-02-05T13:04:48+00:00
{"pretty_name": "Evaluation run of Kquant03/Azathoth-16x7B-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kquant03/Azathoth-16x7B-bf16](https://huggingface.co/Kquant03/Azathoth-16x7B-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T13:02:29.525872](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Azathoth-16x7B-bf16/blob/main/results_2024-02-05T13-02-29.525872.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6527351745375014,\n \"acc_stderr\": 0.03209438096547266,\n \"acc_norm\": 0.6516832663399301,\n \"acc_norm_stderr\": 0.03277230372243864,\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.01729742144853475,\n \"mc2\": 0.6960737340136303,\n \"mc2_stderr\": 0.01510691757213546\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7158703071672355,\n \"acc_stderr\": 0.013179442447653886,\n \"acc_norm\": 0.7380546075085325,\n \"acc_norm_stderr\": 0.012849054826858107\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7253535152360088,\n \"acc_stderr\": 0.004454237797448359,\n \"acc_norm\": 0.8886675960963951,\n \"acc_norm_stderr\": 0.003139004815925866\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n 
\"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.032400380867927465,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.032400380867927465\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.04697085136647863,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.04697085136647863\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.02530590624159063,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.02530590624159063\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.028057791672989017,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.028057791672989017\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328973,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328973\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092434,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092434\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.03407632093854051,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.03407632093854051\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250447,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250447\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676177,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676177\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.036412970813137296,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.036412970813137296\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.013702643715368982,\n 
\"acc_norm\": 0.8212005108556832,\n \"acc_norm_stderr\": 0.013702643715368982\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258176,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4435754189944134,\n \"acc_stderr\": 0.016615680401003724,\n \"acc_norm\": 0.4435754189944134,\n \"acc_norm_stderr\": 0.016615680401003724\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.012743072942653345,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.012743072942653345\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406762,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406762\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000328,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000328\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.01729742144853475,\n \"mc2\": 0.6960737340136303,\n \"mc2_stderr\": 0.01510691757213546\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8547750591949487,\n \"acc_stderr\": 0.009902153904760817\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6982562547384382,\n \"acc_stderr\": 0.012643544762873358\n }\n}\n```", "repo_url": "https://huggingface.co/Kquant03/Azathoth-16x7B-bf16", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-02-29.525872.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-02-29.525872.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-02-29.525872.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-02-29.525872.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-02-29.525872.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-02-29.525872.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["**/details_harness|winogrande|5_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T13-02-29.525872.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_05T13_02_29.525872", "path": ["results_2024-02-05T13-02-29.525872.parquet"]}, {"split": "latest", "path": 
["results_2024-02-05T13-02-29.525872.parquet"]}]}]}
2024-02-05T13:05:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Kquant03/Azathoth-16x7B-bf16 Dataset automatically created during the evaluation run of model Kquant03/Azathoth-16x7B-bf16 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T13:02:29.525872 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Kquant03/Azathoth-16x7B-bf16\n\n\n\nDataset automatically created during the evaluation run of model Kquant03/Azathoth-16x7B-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:02:29.525872(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Kquant03/Azathoth-16x7B-bf16\n\n\n\nDataset automatically created during the evaluation run of model Kquant03/Azathoth-16x7B-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:02:29.525872(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
48849f70b0b0517923c29298db9e2fb905fa4334
# Dataset: PhilEO Pre-train A novel 500GB Sentinel-2 dataset of the PhilEO Suite for model pre-training. ## Dataset Details ### Dataset Description The PhilEO Pre-train dataset is a 500GB global dataset of Sentinel-2 images. The data contain 11 bands at 10m resolution in the following order: 0-SCL, 1-B02, 2-B03, 3-B04, 4-B08, 5-B05, 6-B06, 7-B07, 8-B8A, 9-B11, and 10-B12 where SCL is the Scene Classification Layer. - **Curated by:** ESA Phi-lab and Leonardo Labs - **License:** MIT ## Uses The dataset can be used to pre-train models, i.e. train EO Foundation Models. ### Dataset Sources The basic links for the dataset: - **Repository:** http://huggingface.co/datasets/ESA-philab/PhilEO-pretrain ## Citation Casper Fibaek, Luke Camilleri, Andreas Luyts, Nikolaos Dionelis, Bertrand Le Saux, Bagaglini Leonardo, Cascarano Giacomo Donato, and Giorgio Pasquali, β€œThe PhilEO Geospatial Foundation Model Suite,” To appear, 2024.
ESA-philab/PhilEO-pretrain
[ "license:mit", "region:us" ]
2024-02-05T13:25:12+00:00
{"license": "mit"}
2024-02-09T09:14:52+00:00
[]
[]
TAGS #license-mit #region-us
# Dataset: PhilEO Pre-train A novel 500GB Sentinel-2 dataset of the PhilEO Suite for model pre-training. ## Dataset Details ### Dataset Description The PhilEO Pre-train dataset is a 500GB global dataset of Sentinel-2 images. The data contain 11 bands at 10m resolution in the following order: 0-SCL, 1-B02, 2-B03, 3-B04, 4-B08, 5-B05, 6-B06, 7-B07, 8-B8A, 9-B11, and 10-B12 where SCL is the Scene Classification Layer. - Curated by: ESA Phi-lab and Leonardo Labs - License: MIT ## Uses The dataset can be used to pre-train models, i.e. train EO Foundation Models. ### Dataset Sources The basic links for the dataset: - Repository: URL Casper Fibaek, Luke Camilleri, Andreas Luyts, Nikolaos Dionelis, Bertrand Le Saux, Bagaglini Leonardo, Cascarano Giacomo Donato, and Giorgio Pasquali, β€œThe PhilEO Geospatial Foundation Model Suite,” To appear, 2024.
[ "# Dataset: PhilEO Pre-train\n\nA novel 500GB Sentinel-2 dataset of the PhilEO Suite for model pre-training.", "## Dataset Details", "### Dataset Description\n\nThe PhilEO Pre-train dataset is a 500GB global dataset of Sentinel-2 images.\n\nThe data contain 11 bands at 10m resolution in the following order: 0-SCL, 1-B02, 2-B03, 3-B04, 4-B08, 5-B05, 6-B06, 7-B07, 8-B8A, 9-B11, and 10-B12 where SCL is the Scene Classification Layer.\n\n- Curated by: ESA Phi-lab and Leonardo Labs\n- License: MIT", "## Uses\n\nThe dataset can be used to pre-train models, i.e. train EO Foundation Models.", "### Dataset Sources\n\nThe basic links for the dataset:\n\n- Repository: URL \n\nCasper Fibaek, Luke Camilleri, Andreas Luyts, Nikolaos Dionelis, Bertrand Le Saux, Bagaglini Leonardo, Cascarano Giacomo Donato, and Giorgio Pasquali, β€œThe PhilEO Geospatial Foundation Model Suite,” To appear, 2024." ]
[ "TAGS\n#license-mit #region-us \n", "# Dataset: PhilEO Pre-train\n\nA novel 500GB Sentinel-2 dataset of the PhilEO Suite for model pre-training.", "## Dataset Details", "### Dataset Description\n\nThe PhilEO Pre-train dataset is a 500GB global dataset of Sentinel-2 images.\n\nThe data contain 11 bands at 10m resolution in the following order: 0-SCL, 1-B02, 2-B03, 3-B04, 4-B08, 5-B05, 6-B06, 7-B07, 8-B8A, 9-B11, and 10-B12 where SCL is the Scene Classification Layer.\n\n- Curated by: ESA Phi-lab and Leonardo Labs\n- License: MIT", "## Uses\n\nThe dataset can be used to pre-train models, i.e. train EO Foundation Models.", "### Dataset Sources\n\nThe basic links for the dataset:\n\n- Repository: URL \n\nCasper Fibaek, Luke Camilleri, Andreas Luyts, Nikolaos Dionelis, Bertrand Le Saux, Bagaglini Leonardo, Cascarano Giacomo Donato, and Giorgio Pasquali, β€œThe PhilEO Geospatial Foundation Model Suite,” To appear, 2024." ]
6a59fd80c20bb1f8f315df2fe3c1b39f4e50d185
# Dataset Card for Evaluation run of Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo](https://huggingface.co/Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Radiantloom__radiantloom-mixtral-8x7b-fusion-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T13:38:34.370463](https://huggingface.co/datasets/open-llm-leaderboard/details_Radiantloom__radiantloom-mixtral-8x7b-fusion-dpo/blob/main/results_2024-02-05T13-38-34.370463.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5985350771148836, "acc_stderr": 0.03326838190262248, "acc_norm": 0.6013482102990094, "acc_norm_stderr": 0.03393728138141875, "mc1": 0.39167686658506734, "mc1_stderr": 0.017087795881769625, "mc2": 0.5519575930975205, "mc2_stderr": 0.015808885254693067 }, "harness|arc:challenge|25": { "acc": 0.5878839590443686, "acc_stderr": 0.014383915302225402, "acc_norm": 0.6348122866894198, "acc_norm_stderr": 0.014070265519268802 }, "harness|hellaswag|10": { "acc": 0.640211113324039, "acc_stderr": 0.004789575163418653, "acc_norm": 0.8249352718581956, "acc_norm_stderr": 0.0037924580005234405 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6381578947368421, "acc_stderr": 0.03910525752849724, "acc_norm": 0.6381578947368421, "acc_norm_stderr": 0.03910525752849724 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6805555555555556, "acc_stderr": 0.03899073687357335, "acc_norm": 0.6805555555555556, "acc_norm_stderr": 0.03899073687357335 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 
0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5953757225433526, "acc_stderr": 0.03742461193887248, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.40350877192982454, "acc_stderr": 0.046151869625837026, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.046151869625837026 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.0407032901370707, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.0407032901370707 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.635483870967742, "acc_stderr": 0.02737987122994324, "acc_norm": 0.635483870967742, "acc_norm_stderr": 0.02737987122994324 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.034819048444388045, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.034819048444388045 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709390974, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709390974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198896, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198896 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.02811209121011746, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.02811209121011746 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5692307692307692, "acc_stderr": 0.025106820660539753, "acc_norm": 0.5692307692307692, "acc_norm_stderr": 0.025106820660539753 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253255, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7779816513761468, "acc_stderr": 0.017818849564796634, "acc_norm": 0.7779816513761468, "acc_norm_stderr": 0.017818849564796634 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.03350991604696043, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.03350991604696043 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7401960784313726, "acc_stderr": 0.03077855467869326, "acc_norm": 0.7401960784313726, "acc_norm_stderr": 0.03077855467869326 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036423, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.03160295143776679, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.03160295143776679 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728744, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728744 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097654, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097654 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591311, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591311 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489298, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489298 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7854406130268199, "acc_stderr": 0.014680033956893346, "acc_norm": 0.7854406130268199, "acc_norm_stderr": 0.014680033956893346 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.653179190751445, "acc_stderr": 0.025624723994030454, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.025624723994030454 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4134078212290503, "acc_stderr": 0.01646981492840617, "acc_norm": 0.4134078212290503, "acc_norm_stderr": 0.01646981492840617 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6339869281045751, "acc_stderr": 0.02758281141515961, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.02758281141515961 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.02645722506781102, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.02645722506781102 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6851851851851852, "acc_stderr": 0.02584224870090217, "acc_norm": 
0.6851851851851852, "acc_norm_stderr": 0.02584224870090217 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46808510638297873, "acc_stderr": 0.029766675075873862, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873862 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4211212516297262, "acc_stderr": 0.012610325733489906, "acc_norm": 0.4211212516297262, "acc_norm_stderr": 0.012610325733489906 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5919117647058824, "acc_stderr": 0.029855261393483924, "acc_norm": 0.5919117647058824, "acc_norm_stderr": 0.029855261393483924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6241830065359477, "acc_stderr": 0.01959402113657744, "acc_norm": 0.6241830065359477, "acc_norm_stderr": 0.01959402113657744 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6417910447761194, "acc_stderr": 0.03390393042268815, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268815 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.0387862677100236, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.03218093795602357, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.03218093795602357 }, "harness|truthfulqa:mc|0": { "mc1": 0.39167686658506734, "mc1_stderr": 0.017087795881769625, "mc2": 0.5519575930975205, "mc2_stderr": 0.015808885254693067 }, "harness|winogrande|5": { "acc": 0.760852407261247, "acc_stderr": 0.011988541844843914 }, "harness|gsm8k|5": { "acc": 0.5018953752843063, "acc_stderr": 0.013772385765569753 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Radiantloom__radiantloom-mixtral-8x7b-fusion-dpo
[ "region:us" ]
2024-02-05T13:40:52+00:00
{"pretty_name": "Evaluation run of Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo](https://huggingface.co/Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Radiantloom__radiantloom-mixtral-8x7b-fusion-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T13:38:34.370463](https://huggingface.co/datasets/open-llm-leaderboard/details_Radiantloom__radiantloom-mixtral-8x7b-fusion-dpo/blob/main/results_2024-02-05T13-38-34.370463.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5985350771148836,\n \"acc_stderr\": 0.03326838190262248,\n \"acc_norm\": 0.6013482102990094,\n \"acc_norm_stderr\": 0.03393728138141875,\n \"mc1\": 0.39167686658506734,\n \"mc1_stderr\": 0.017087795881769625,\n \"mc2\": 0.5519575930975205,\n \"mc2_stderr\": 0.015808885254693067\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5878839590443686,\n \"acc_stderr\": 0.014383915302225402,\n \"acc_norm\": 0.6348122866894198,\n \"acc_norm_stderr\": 0.014070265519268802\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.640211113324039,\n \"acc_stderr\": 0.004789575163418653,\n \"acc_norm\": 0.8249352718581956,\n \"acc_norm_stderr\": 0.0037924580005234405\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.03910525752849724,\n \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.03910525752849724\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6805555555555556,\n \"acc_stderr\": 0.03899073687357335,\n \"acc_norm\": 0.6805555555555556,\n \"acc_norm_stderr\": 
0.03899073687357335\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.40350877192982454,\n \"acc_stderr\": 0.046151869625837026,\n \"acc_norm\": 0.40350877192982454,\n \"acc_norm_stderr\": 0.046151869625837026\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.0407032901370707,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.0407032901370707\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574924,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.635483870967742,\n \"acc_stderr\": 0.02737987122994324,\n \"acc_norm\": 0.635483870967742,\n \"acc_norm_stderr\": 0.02737987122994324\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.034819048444388045,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.034819048444388045\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709390974,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709390974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.030313710538198896,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.030313710538198896\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011746,\n \"acc_norm\": 
0.8134715025906736,\n \"acc_norm_stderr\": 0.02811209121011746\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5692307692307692,\n \"acc_stderr\": 0.025106820660539753,\n \"acc_norm\": 0.5692307692307692,\n \"acc_norm_stderr\": 0.025106820660539753\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.03135709599613591,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.03135709599613591\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7779816513761468,\n \"acc_stderr\": 0.017818849564796634,\n \"acc_norm\": 0.7779816513761468,\n \"acc_norm_stderr\": 0.017818849564796634\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.03350991604696043,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.03350991604696043\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7401960784313726,\n \"acc_stderr\": 0.03077855467869326,\n \"acc_norm\": 0.7401960784313726,\n \"acc_norm_stderr\": 0.03077855467869326\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.027985699387036423,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.027985699387036423\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.03160295143776679,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.03160295143776679\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728744,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728744\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097654,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097654\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591311,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591311\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489298,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489298\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 
0.04408440022768079\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7854406130268199,\n \"acc_stderr\": 0.014680033956893346,\n \"acc_norm\": 0.7854406130268199,\n \"acc_norm_stderr\": 0.014680033956893346\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.025624723994030454,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.025624723994030454\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4134078212290503,\n \"acc_stderr\": 0.01646981492840617,\n \"acc_norm\": 0.4134078212290503,\n \"acc_norm_stderr\": 0.01646981492840617\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.02758281141515961,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.02758281141515961\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.02645722506781102,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.02645722506781102\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.02584224870090217,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.02584224870090217\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873862,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873862\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4211212516297262,\n \"acc_stderr\": 0.012610325733489906,\n \"acc_norm\": 0.4211212516297262,\n \"acc_norm_stderr\": 0.012610325733489906\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5919117647058824,\n \"acc_stderr\": 0.029855261393483924,\n \"acc_norm\": 0.5919117647058824,\n \"acc_norm_stderr\": 0.029855261393483924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6241830065359477,\n \"acc_stderr\": 0.01959402113657744,\n \"acc_norm\": 0.6241830065359477,\n \"acc_norm_stderr\": 0.01959402113657744\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6417910447761194,\n \"acc_stderr\": 0.03390393042268815,\n \"acc_norm\": 0.6417910447761194,\n \"acc_norm_stderr\": 0.03390393042268815\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.4578313253012048,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.03218093795602357,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.03218093795602357\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.39167686658506734,\n \"mc1_stderr\": 0.017087795881769625,\n \"mc2\": 0.5519575930975205,\n \"mc2_stderr\": 0.015808885254693067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.760852407261247,\n \"acc_stderr\": 0.011988541844843914\n },\n \"harness|gsm8k|5\": 
{\n \"acc\": 0.5018953752843063,\n \"acc_stderr\": 0.013772385765569753\n }\n}\n```", "repo_url": "https://huggingface.co/Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-38-34.370463.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-38-34.370463.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-38-34.370463.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-38-34.370463.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-38-34.370463.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["**/details_harness|winogrande|5_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-05T13-38-34.370463.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_05T13_38_34.370463", "path": ["results_2024-02-05T13-38-34.370463.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T13-38-34.370463.parquet"]}]}]}
2024-02-05T13:41:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo Dataset automatically created during the evaluation run of model Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T13:38:34.370463(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo\n\n\n\nDataset automatically created during the evaluation run of model Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:38:34.370463(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo\n\n\n\nDataset automatically created during the evaluation run of model Radiantloom/radiantloom-mixtral-8x7b-fusion-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:38:34.370463(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
f97aa209c789b8f3b10abab58ebba9f1e590105c
# Dataset Card for Evaluation run of binbi/Ein-72B-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [binbi/Ein-72B-v0.1](https://huggingface.co/binbi/Ein-72B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_binbi__Ein-72B-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T14:07:01.496774](https://huggingface.co/datasets/open-llm-leaderboard/details_binbi__Ein-72B-v0.1/blob/main/results_2024-02-05T14-07-01.496774.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7714774481143158, "acc_stderr": 0.028033284472043257, "acc_norm": 0.7730249049429745, "acc_norm_stderr": 0.028591667108629, "mc1": 0.6646266829865362, "mc1_stderr": 0.016527534039668987, "mc2": 0.7846937103895764, "mc2_stderr": 0.013816866993252642 }, "harness|arc:challenge|25": { "acc": 0.7448805460750854, "acc_stderr": 0.012739038695202104, "acc_norm": 0.765358361774744, "acc_norm_stderr": 0.012383873560768675 }, "harness|hellaswag|10": { "acc": 0.729735112527385, "acc_stderr": 0.00443188978363381, "acc_norm": 0.8919537940649273, "acc_norm_stderr": 0.0030980431017758325 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7185185185185186, "acc_stderr": 0.038850042458002526, "acc_norm": 0.7185185185185186, "acc_norm_stderr": 0.038850042458002526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.881578947368421, "acc_stderr": 0.026293995855474928, "acc_norm": 0.881578947368421, "acc_norm_stderr": 0.026293995855474928 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.81, "acc_stderr": 0.03942772444036623, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8415094339622642, "acc_stderr": 0.02247652871016772, "acc_norm": 0.8415094339622642, "acc_norm_stderr": 0.02247652871016772 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9305555555555556, "acc_stderr": 0.021257974822832048, "acc_norm": 0.9305555555555556, "acc_norm_stderr": 0.021257974822832048 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145633, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7514450867052023, "acc_stderr": 0.03295304696818317, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.03295304696818317 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.049406356306056595, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.03942772444036622, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036622 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7872340425531915, "acc_stderr": 0.026754391348039766, "acc_norm": 0.7872340425531915, "acc_norm_stderr": 0.026754391348039766 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7862068965517242, "acc_stderr": 0.034165204477475494, "acc_norm": 0.7862068965517242, "acc_norm_stderr": 0.034165204477475494 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6825396825396826, "acc_stderr": 0.023973861998992072, "acc_norm": 0.6825396825396826, "acc_norm_stderr": 0.023973861998992072 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5634920634920635, "acc_stderr": 0.04435932892851466, "acc_norm": 0.5634920634920635, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8870967741935484, "acc_stderr": 0.0180036033258636, "acc_norm": 0.8870967741935484, "acc_norm_stderr": 0.0180036033258636 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6650246305418719, "acc_stderr": 0.033208527423483104, "acc_norm": 0.6650246305418719, "acc_norm_stderr": 0.033208527423483104 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8545454545454545, "acc_stderr": 0.027530196355066584, "acc_norm": 0.8545454545454545, "acc_norm_stderr": 0.027530196355066584 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9393939393939394, "acc_stderr": 0.016999994927421592, "acc_norm": 0.9393939393939394, "acc_norm_stderr": 0.016999994927421592 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9844559585492227, "acc_stderr": 0.008927492715084315, "acc_norm": 0.9844559585492227, "acc_norm_stderr": 0.008927492715084315 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7974358974358975, "acc_stderr": 0.02037766097037137, "acc_norm": 0.7974358974358975, "acc_norm_stderr": 0.02037766097037137 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.030401786406101507, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.030401786406101507 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8445378151260504, "acc_stderr": 0.023536818625398904, "acc_norm": 0.8445378151260504, "acc_norm_stderr": 0.023536818625398904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5562913907284768, "acc_stderr": 
0.04056527902281732, "acc_norm": 0.5562913907284768, "acc_norm_stderr": 0.04056527902281732 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9339449541284404, "acc_stderr": 0.01064913148785894, "acc_norm": 0.9339449541284404, "acc_norm_stderr": 0.01064913148785894 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6898148148148148, "acc_stderr": 0.03154696285656627, "acc_norm": 0.6898148148148148, "acc_norm_stderr": 0.03154696285656627 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9068627450980392, "acc_stderr": 0.020397853969426987, "acc_norm": 0.9068627450980392, "acc_norm_stderr": 0.020397853969426987 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9071729957805907, "acc_stderr": 0.018889750550956715, "acc_norm": 0.9071729957805907, "acc_norm_stderr": 0.018889750550956715 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8071748878923767, "acc_stderr": 0.026478240960489365, "acc_norm": 0.8071748878923767, "acc_norm_stderr": 0.026478240960489365 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.03008309871603522, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.03008309871603522 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8518518518518519, "acc_stderr": 0.03434300243630999, "acc_norm": 0.8518518518518519, "acc_norm_stderr": 0.03434300243630999 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8343558282208589, "acc_stderr": 0.029208296231259104, "acc_norm": 0.8343558282208589, "acc_norm_stderr": 0.029208296231259104 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6160714285714286, "acc_stderr": 0.04616143075028546, "acc_norm": 0.6160714285714286, "acc_norm_stderr": 0.04616143075028546 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.03393295729761011, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.03393295729761011 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9444444444444444, "acc_stderr": 0.015006312806446914, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.015006312806446914 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.86, "acc_stderr": 0.034873508801977725, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977725 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9195402298850575, "acc_stderr": 0.009726831316141866, "acc_norm": 0.9195402298850575, "acc_norm_stderr": 0.009726831316141866 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8439306358381503, "acc_stderr": 0.019539014685374036, "acc_norm": 0.8439306358381503, "acc_norm_stderr": 0.019539014685374036 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6994413407821229, "acc_stderr": 0.015334566806251166, "acc_norm": 0.6994413407821229, "acc_norm_stderr": 0.015334566806251166 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8496732026143791, "acc_stderr": 0.02046417512433263, "acc_norm": 0.8496732026143791, "acc_norm_stderr": 0.02046417512433263 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8392282958199357, "acc_stderr": 0.020862388082391894, "acc_norm": 0.8392282958199357, "acc_norm_stderr": 0.020862388082391894 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8703703703703703, "acc_stderr": 0.018689725721062075, "acc_norm": 0.8703703703703703, "acc_norm_stderr": 0.018689725721062075 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.6595744680851063, "acc_stderr": 0.028267657482650158, "acc_norm": 0.6595744680851063, "acc_norm_stderr": 0.028267657482650158 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.60625814863103, "acc_stderr": 0.012478532272564435, "acc_norm": 0.60625814863103, "acc_norm_stderr": 0.012478532272564435 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8308823529411765, "acc_stderr": 0.022770868010112987, "acc_norm": 0.8308823529411765, "acc_norm_stderr": 0.022770868010112987 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8137254901960784, "acc_stderr": 0.01575052628436337, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.01575052628436337 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7727272727272727, "acc_stderr": 0.04013964554072775, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.04013964554072775 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8326530612244898, "acc_stderr": 0.02389714476891452, "acc_norm": 0.8326530612244898, "acc_norm_stderr": 0.02389714476891452 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9054726368159204, "acc_stderr": 0.020687186951534094, "acc_norm": 0.9054726368159204, "acc_norm_stderr": 0.020687186951534094 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.93, "acc_stderr": 0.0256432399976243, "acc_norm": 0.93, "acc_norm_stderr": 0.0256432399976243 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015578, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015578 }, "harness|truthfulqa:mc|0": { "mc1": 0.6646266829865362, "mc1_stderr": 0.016527534039668987, "mc2": 0.7846937103895764, "mc2_stderr": 0.013816866993252642 }, "harness|winogrande|5": { "acc": 0.840568271507498, "acc_stderr": 0.010288617479454764 }, "harness|gsm8k|5": { "acc": 0.7937831690674754, "acc_stderr": 0.011144364089781438 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
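Beyond the per-task load shown in the card above, the aggregated scores can be pulled from the "results" configuration. The block below is a minimal sketch, assuming only the config name "results" and the split name "latest" listed in this card's metadata; the conversion to pandas is an optional convenience for inspection, not something the card itself prescribes.

```python
from datasets import load_dataset

# "results" holds the aggregated metrics; the "latest" split always points to
# the most recent evaluation run of binbi/Ein-72B-v0.1.
results = load_dataset(
    "open-llm-leaderboard/details_binbi__Ein-72B-v0.1",
    "results",
    split="latest",
)

# Inspect what the run stored: column names first, then the first rows.
df = results.to_pandas()
print(df.columns.tolist())
print(df.head())
```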
open-llm-leaderboard/details_binbi__Ein-72B-v0.1
[ "region:us" ]
2024-02-05T13:55:44+00:00
{"pretty_name": "Evaluation run of binbi/Ein-72B-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [binbi/Ein-72B-v0.1](https://huggingface.co/binbi/Ein-72B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_binbi__Ein-72B-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T14:07:01.496774](https://huggingface.co/datasets/open-llm-leaderboard/details_binbi__Ein-72B-v0.1/blob/main/results_2024-02-05T14-07-01.496774.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7714774481143158,\n \"acc_stderr\": 0.028033284472043257,\n \"acc_norm\": 0.7730249049429745,\n \"acc_norm_stderr\": 0.028591667108629,\n \"mc1\": 0.6646266829865362,\n \"mc1_stderr\": 0.016527534039668987,\n \"mc2\": 0.7846937103895764,\n \"mc2_stderr\": 0.013816866993252642\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7448805460750854,\n \"acc_stderr\": 0.012739038695202104,\n \"acc_norm\": 0.765358361774744,\n \"acc_norm_stderr\": 0.012383873560768675\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.729735112527385,\n \"acc_stderr\": 0.00443188978363381,\n \"acc_norm\": 0.8919537940649273,\n \"acc_norm_stderr\": 0.0030980431017758325\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7185185185185186,\n \"acc_stderr\": 0.038850042458002526,\n \"acc_norm\": 0.7185185185185186,\n \"acc_norm_stderr\": 0.038850042458002526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.881578947368421,\n \"acc_stderr\": 0.026293995855474928,\n \"acc_norm\": 0.881578947368421,\n \"acc_norm_stderr\": 0.026293995855474928\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8415094339622642,\n \"acc_stderr\": 0.02247652871016772,\n \"acc_norm\": 0.8415094339622642,\n \"acc_norm_stderr\": 0.02247652871016772\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9305555555555556,\n \"acc_stderr\": 0.021257974822832048,\n \"acc_norm\": 0.9305555555555556,\n \"acc_norm_stderr\": 0.021257974822832048\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n 
\"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.03295304696818317,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.03295304696818317\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036622,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036622\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7872340425531915,\n \"acc_stderr\": 0.026754391348039766,\n \"acc_norm\": 0.7872340425531915,\n \"acc_norm_stderr\": 0.026754391348039766\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7862068965517242,\n \"acc_stderr\": 0.034165204477475494,\n \"acc_norm\": 0.7862068965517242,\n \"acc_norm_stderr\": 0.034165204477475494\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6825396825396826,\n \"acc_stderr\": 0.023973861998992072,\n \"acc_norm\": 0.6825396825396826,\n \"acc_norm_stderr\": 0.023973861998992072\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5634920634920635,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.5634920634920635,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8870967741935484,\n \"acc_stderr\": 0.0180036033258636,\n \"acc_norm\": 0.8870967741935484,\n \"acc_norm_stderr\": 0.0180036033258636\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6650246305418719,\n \"acc_stderr\": 0.033208527423483104,\n \"acc_norm\": 0.6650246305418719,\n \"acc_norm_stderr\": 0.033208527423483104\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8545454545454545,\n \"acc_stderr\": 0.027530196355066584,\n \"acc_norm\": 0.8545454545454545,\n \"acc_norm_stderr\": 0.027530196355066584\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9393939393939394,\n \"acc_stderr\": 0.016999994927421592,\n \"acc_norm\": 0.9393939393939394,\n \"acc_norm_stderr\": 0.016999994927421592\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9844559585492227,\n \"acc_stderr\": 0.008927492715084315,\n \"acc_norm\": 0.9844559585492227,\n \"acc_norm_stderr\": 0.008927492715084315\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7974358974358975,\n 
\"acc_stderr\": 0.02037766097037137,\n \"acc_norm\": 0.7974358974358975,\n \"acc_norm_stderr\": 0.02037766097037137\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.030401786406101507,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.030401786406101507\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8445378151260504,\n \"acc_stderr\": 0.023536818625398904,\n \"acc_norm\": 0.8445378151260504,\n \"acc_norm_stderr\": 0.023536818625398904\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5562913907284768,\n \"acc_stderr\": 0.04056527902281732,\n \"acc_norm\": 0.5562913907284768,\n \"acc_norm_stderr\": 0.04056527902281732\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9339449541284404,\n \"acc_stderr\": 0.01064913148785894,\n \"acc_norm\": 0.9339449541284404,\n \"acc_norm_stderr\": 0.01064913148785894\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6898148148148148,\n \"acc_stderr\": 0.03154696285656627,\n \"acc_norm\": 0.6898148148148148,\n \"acc_norm_stderr\": 0.03154696285656627\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9068627450980392,\n \"acc_stderr\": 0.020397853969426987,\n \"acc_norm\": 0.9068627450980392,\n \"acc_norm_stderr\": 0.020397853969426987\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9071729957805907,\n \"acc_stderr\": 0.018889750550956715,\n \"acc_norm\": 0.9071729957805907,\n \"acc_norm_stderr\": 0.018889750550956715\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8071748878923767,\n \"acc_stderr\": 0.026478240960489365,\n \"acc_norm\": 0.8071748878923767,\n \"acc_norm_stderr\": 0.026478240960489365\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8702290076335878,\n \"acc_stderr\": 0.029473649496907065,\n \"acc_norm\": 0.8702290076335878,\n \"acc_norm_stderr\": 0.029473649496907065\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.03008309871603522,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.03008309871603522\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8518518518518519,\n \"acc_stderr\": 0.03434300243630999,\n \"acc_norm\": 0.8518518518518519,\n \"acc_norm_stderr\": 0.03434300243630999\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8343558282208589,\n \"acc_stderr\": 0.029208296231259104,\n \"acc_norm\": 0.8343558282208589,\n \"acc_norm_stderr\": 0.029208296231259104\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6160714285714286,\n \"acc_stderr\": 0.04616143075028546,\n \"acc_norm\": 0.6160714285714286,\n \"acc_norm_stderr\": 0.04616143075028546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.03393295729761011,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.03393295729761011\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.015006312806446914,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.015006312806446914\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.034873508801977725,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.034873508801977725\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9195402298850575,\n \"acc_stderr\": 0.009726831316141866,\n \"acc_norm\": 
0.9195402298850575,\n \"acc_norm_stderr\": 0.009726831316141866\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8439306358381503,\n \"acc_stderr\": 0.019539014685374036,\n \"acc_norm\": 0.8439306358381503,\n \"acc_norm_stderr\": 0.019539014685374036\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6994413407821229,\n \"acc_stderr\": 0.015334566806251166,\n \"acc_norm\": 0.6994413407821229,\n \"acc_norm_stderr\": 0.015334566806251166\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8496732026143791,\n \"acc_stderr\": 0.02046417512433263,\n \"acc_norm\": 0.8496732026143791,\n \"acc_norm_stderr\": 0.02046417512433263\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8392282958199357,\n \"acc_stderr\": 0.020862388082391894,\n \"acc_norm\": 0.8392282958199357,\n \"acc_norm_stderr\": 0.020862388082391894\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8703703703703703,\n \"acc_stderr\": 0.018689725721062075,\n \"acc_norm\": 0.8703703703703703,\n \"acc_norm_stderr\": 0.018689725721062075\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6595744680851063,\n \"acc_stderr\": 0.028267657482650158,\n \"acc_norm\": 0.6595744680851063,\n \"acc_norm_stderr\": 0.028267657482650158\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.60625814863103,\n \"acc_stderr\": 0.012478532272564435,\n \"acc_norm\": 0.60625814863103,\n \"acc_norm_stderr\": 0.012478532272564435\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8308823529411765,\n \"acc_stderr\": 0.022770868010112987,\n \"acc_norm\": 0.8308823529411765,\n \"acc_norm_stderr\": 0.022770868010112987\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.01575052628436337,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.01575052628436337\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.04013964554072775,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.04013964554072775\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8326530612244898,\n \"acc_stderr\": 0.02389714476891452,\n \"acc_norm\": 0.8326530612244898,\n \"acc_norm_stderr\": 0.02389714476891452\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9054726368159204,\n \"acc_stderr\": 0.020687186951534094,\n \"acc_norm\": 0.9054726368159204,\n \"acc_norm_stderr\": 0.020687186951534094\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.93,\n \"acc_stderr\": 0.0256432399976243,\n \"acc_norm\": 0.93,\n \"acc_norm_stderr\": 0.0256432399976243\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.038444531817709175,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.038444531817709175\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015578,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015578\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6646266829865362,\n \"mc1_stderr\": 0.016527534039668987,\n \"mc2\": 0.7846937103895764,\n \"mc2_stderr\": 0.013816866993252642\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.840568271507498,\n \"acc_stderr\": 0.010288617479454764\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7937831690674754,\n \"acc_stderr\": 0.011144364089781438\n }\n}\n```", "repo_url": "https://huggingface.co/binbi/Ein-72B-v0.1", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|arc:challenge|25_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|gsm8k|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hellaswag|10_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-53-38.190866.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-53-38.190866.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T14-07-01.496774.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T14-07-01.496774.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T14-07-01.496774.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T14-07-01.496774.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-53-38.190866.parquet"]}, 
{"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["**/details_harness|winogrande|5_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": ["**/details_harness|winogrande|5_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-05T14-07-01.496774.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_05T13_53_38.190866", "path": ["results_2024-02-05T13-53-38.190866.parquet"]}, {"split": "2024_02_05T14_07_01.496774", "path": 
["results_2024-02-05T14-07-01.496774.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T14-07-01.496774.parquet"]}]}]}
2024-02-05T14:09:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of binbi/Ein-72B-v0.1 Dataset automatically created during the evaluation run of model binbi/Ein-72B-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T14:07:01.496774 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of binbi/Ein-72B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model binbi/Ein-72B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T14:07:01.496774(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of binbi/Ein-72B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model binbi/Ein-72B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T14:07:01.496774(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
03f3ffedb756c25e3770dd70a70b12027eee6371
# Dataset Card for Evaluation run of cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO](https://huggingface.co/cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-05T13:57:06.982400](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO/blob/main/results_2024-02-05T13-57-06.982400.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7649892778549832, "acc_stderr": 0.02823313368050758, "acc_norm": 0.7681511495490131, "acc_norm_stderr": 0.028777527908042073, "mc1": 0.5458996328029376, "mc1_stderr": 0.017429593091323522, "mc2": 0.7131962651033679, "mc2_stderr": 0.014139525056193024 }, "harness|arc:challenge|25": { "acc": 0.7167235494880546, "acc_stderr": 0.013167478735134575, "acc_norm": 0.7406143344709898, "acc_norm_stderr": 0.012808273573927097 }, "harness|hellaswag|10": { "acc": 0.6703843855805617, "acc_stderr": 0.004691128722535485, "acc_norm": 0.8666600278828919, "acc_norm_stderr": 0.003392470498816845 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7555555555555555, "acc_stderr": 0.03712537833614866, "acc_norm": 0.7555555555555555, "acc_norm_stderr": 0.03712537833614866 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8037735849056604, "acc_stderr": 0.024442388131100813, "acc_norm": 0.8037735849056604, "acc_norm_stderr": 0.024442388131100813 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9027777777777778, "acc_stderr": 0.024774516250440182, "acc_norm": 0.9027777777777778, "acc_norm_stderr": 0.024774516250440182 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 },
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.049020713000019756, "acc_norm": 0.61, "acc_norm_stderr": 0.049020713000019756 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7109826589595376, "acc_stderr": 0.034564257450869995, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.034564257450869995 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5196078431372549, "acc_stderr": 0.04971358884367406, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.04971358884367406 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7574468085106383, "acc_stderr": 0.028020226271200217, "acc_norm": 0.7574468085106383, "acc_norm_stderr": 0.028020226271200217 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7517241379310344, "acc_stderr": 0.036001056927277696, "acc_norm": 0.7517241379310344, "acc_norm_stderr": 0.036001056927277696 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7486772486772487, "acc_stderr": 0.0223404823396439, "acc_norm": 0.7486772486772487, "acc_norm_stderr": 0.0223404823396439 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9064516129032258, "acc_stderr": 0.016565754668270982, "acc_norm": 0.9064516129032258, "acc_norm_stderr": 0.016565754668270982 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6699507389162561, "acc_stderr": 0.033085304262282574, "acc_norm": 0.6699507389162561, "acc_norm_stderr": 0.033085304262282574 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8848484848484849, "acc_stderr": 0.024925699798115344, "acc_norm": 0.8848484848484849, "acc_norm_stderr": 0.024925699798115344 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9343434343434344, "acc_stderr": 0.017646526677233335, "acc_norm": 0.9343434343434344, "acc_norm_stderr": 0.017646526677233335 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.011464523356953162, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.011464523356953162 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8102564102564103, "acc_stderr": 0.019880165406588796, "acc_norm": 0.8102564102564103, "acc_norm_stderr": 0.019880165406588796 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.45925925925925926, "acc_stderr": 0.030384169232350832, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.030384169232350832 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8445378151260504, "acc_stderr": 0.023536818625398897, 
"acc_norm": 0.8445378151260504, "acc_norm_stderr": 0.023536818625398897 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5165562913907285, "acc_stderr": 0.04080244185628972, "acc_norm": 0.5165562913907285, "acc_norm_stderr": 0.04080244185628972 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9229357798165138, "acc_stderr": 0.011434381698911096, "acc_norm": 0.9229357798165138, "acc_norm_stderr": 0.011434381698911096 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6620370370370371, "acc_stderr": 0.03225941352631295, "acc_norm": 0.6620370370370371, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.018318855850089678, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.018318855850089678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9113924050632911, "acc_stderr": 0.018498315206865384, "acc_norm": 0.9113924050632911, "acc_norm_stderr": 0.018498315206865384 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8981481481481481, "acc_stderr": 0.02923927267563275, "acc_norm": 0.8981481481481481, "acc_norm_stderr": 0.02923927267563275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8711656441717791, "acc_stderr": 0.026321383198783674, "acc_norm": 0.8711656441717791, "acc_norm_stderr": 0.026321383198783674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5446428571428571, "acc_stderr": 0.04726835553719098, "acc_norm": 0.5446428571428571, "acc_norm_stderr": 0.04726835553719098 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.0339329572976101, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.0339329572976101 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9444444444444444, "acc_stderr": 0.01500631280644693, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.01500631280644693 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9157088122605364, "acc_stderr": 0.009934966499513791, "acc_norm": 0.9157088122605364, "acc_norm_stderr": 0.009934966499513791 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8323699421965318, "acc_stderr": 0.020110579919734847, "acc_norm": 0.8323699421965318, "acc_norm_stderr": 0.020110579919734847 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8, "acc_stderr": 0.013378001241813072, "acc_norm": 0.8, "acc_norm_stderr": 0.013378001241813072 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02082375883758091, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02082375883758091 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8006430868167203, "acc_stderr": 0.022691033780549656, "acc_norm": 0.8006430868167203, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8672839506172839, 
"acc_stderr": 0.018877353839571842, "acc_norm": 0.8672839506172839, "acc_norm_stderr": 0.018877353839571842 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.648936170212766, "acc_stderr": 0.028473501272963758, "acc_norm": 0.648936170212766, "acc_norm_stderr": 0.028473501272963758 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5912646675358539, "acc_stderr": 0.01255570134670338, "acc_norm": 0.5912646675358539, "acc_norm_stderr": 0.01255570134670338 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8382352941176471, "acc_stderr": 0.022368672562886747, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.022368672562886747 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.815359477124183, "acc_stderr": 0.015697029240757773, "acc_norm": 0.815359477124183, "acc_norm_stderr": 0.015697029240757773 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8489795918367347, "acc_stderr": 0.022923004094736847, "acc_norm": 0.8489795918367347, "acc_norm_stderr": 0.022923004094736847 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9104477611940298, "acc_stderr": 0.02019067053502792, "acc_norm": 0.9104477611940298, "acc_norm_stderr": 0.02019067053502792 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.5458996328029376, "mc1_stderr": 0.017429593091323522, "mc2": 0.7131962651033679, "mc2_stderr": 0.014139525056193024 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.01045089954537063 }, "harness|gsm8k|5": { "acc": 0.7293404094010614, "acc_stderr": 0.012238245006183411 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
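As a small addition to the loading example near the top of this card, here is a hedged sketch of how the aggregated metrics could be pulled from the `results` config. It assumes only the standard `datasets` API; the repository id, the `results` config, and its `latest` split are names already given in this card, while the parquet column layout is not documented here, so the snippet inspects it rather than assuming specific field names.

```python
from datasets import load_dataset

# Repository id as given in this card's own loading example.
repo_id = "open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO"

# The "results" config stores the aggregated benchmark scores; "latest" always
# points at the most recent evaluation run.
agg = load_dataset(repo_id, "results", split="latest")

# The exact schema of the results parquet is not documented in the card, so
# inspect the columns instead of hard-coding field names.
print(agg.column_names)
print(agg[0])
```

The same pattern works for any per-task config, e.g. the `harness_winogrande_5` config used in the example above.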
open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO
[ "region:us" ]
2024-02-05T13:59:21+00:00
{"pretty_name": "Evaluation run of cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO](https://huggingface.co/cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-05T13:57:06.982400](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO/blob/main/results_2024-02-05T13-57-06.982400.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7649892778549832,\n \"acc_stderr\": 0.02823313368050758,\n \"acc_norm\": 0.7681511495490131,\n \"acc_norm_stderr\": 0.028777527908042073,\n \"mc1\": 0.5458996328029376,\n \"mc1_stderr\": 0.017429593091323522,\n \"mc2\": 0.7131962651033679,\n \"mc2_stderr\": 0.014139525056193024\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7167235494880546,\n \"acc_stderr\": 0.013167478735134575,\n \"acc_norm\": 0.7406143344709898,\n \"acc_norm_stderr\": 0.012808273573927097\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6703843855805617,\n \"acc_stderr\": 0.004691128722535485,\n \"acc_norm\": 0.8666600278828919,\n \"acc_norm_stderr\": 0.003392470498816845\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7555555555555555,\n \"acc_stderr\": 0.03712537833614866,\n \"acc_norm\": 0.7555555555555555,\n \"acc_norm_stderr\": 0.03712537833614866\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8037735849056604,\n \"acc_stderr\": 0.024442388131100813,\n \"acc_norm\": 0.8037735849056604,\n \"acc_norm_stderr\": 0.024442388131100813\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.024774516250440182,\n \"acc_norm\": 0.9027777777777778,\n 
\"acc_norm_stderr\": 0.024774516250440182\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.049020713000019756,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.049020713000019756\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.034564257450869995,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.034564257450869995\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5196078431372549,\n \"acc_stderr\": 0.04971358884367406,\n \"acc_norm\": 0.5196078431372549,\n \"acc_norm_stderr\": 0.04971358884367406\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7574468085106383,\n \"acc_stderr\": 0.028020226271200217,\n \"acc_norm\": 0.7574468085106383,\n \"acc_norm_stderr\": 0.028020226271200217\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7517241379310344,\n \"acc_stderr\": 0.036001056927277696,\n \"acc_norm\": 0.7517241379310344,\n \"acc_norm_stderr\": 0.036001056927277696\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7486772486772487,\n \"acc_stderr\": 0.0223404823396439,\n \"acc_norm\": 0.7486772486772487,\n \"acc_norm_stderr\": 0.0223404823396439\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n \"acc_stderr\": 0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9064516129032258,\n \"acc_stderr\": 0.016565754668270982,\n \"acc_norm\": 0.9064516129032258,\n \"acc_norm_stderr\": 0.016565754668270982\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6699507389162561,\n \"acc_stderr\": 0.033085304262282574,\n \"acc_norm\": 0.6699507389162561,\n \"acc_norm_stderr\": 0.033085304262282574\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8848484848484849,\n \"acc_stderr\": 0.024925699798115344,\n \"acc_norm\": 0.8848484848484849,\n \"acc_norm_stderr\": 0.024925699798115344\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9343434343434344,\n \"acc_stderr\": 0.017646526677233335,\n \"acc_norm\": 0.9343434343434344,\n \"acc_norm_stderr\": 0.017646526677233335\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 
0.011464523356953162,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.011464523356953162\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8102564102564103,\n \"acc_stderr\": 0.019880165406588796,\n \"acc_norm\": 0.8102564102564103,\n \"acc_norm_stderr\": 0.019880165406588796\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.45925925925925926,\n \"acc_stderr\": 0.030384169232350832,\n \"acc_norm\": 0.45925925925925926,\n \"acc_norm_stderr\": 0.030384169232350832\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8445378151260504,\n \"acc_stderr\": 0.023536818625398897,\n \"acc_norm\": 0.8445378151260504,\n \"acc_norm_stderr\": 0.023536818625398897\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5165562913907285,\n \"acc_stderr\": 0.04080244185628972,\n \"acc_norm\": 0.5165562913907285,\n \"acc_norm_stderr\": 0.04080244185628972\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9229357798165138,\n \"acc_stderr\": 0.011434381698911096,\n \"acc_norm\": 0.9229357798165138,\n \"acc_norm_stderr\": 0.011434381698911096\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6620370370370371,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.6620370370370371,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9264705882352942,\n \"acc_stderr\": 0.018318855850089678,\n \"acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.018318855850089678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9113924050632911,\n \"acc_stderr\": 0.018498315206865384,\n \"acc_norm\": 0.9113924050632911,\n \"acc_norm_stderr\": 0.018498315206865384\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8702290076335878,\n \"acc_stderr\": 0.029473649496907065,\n \"acc_norm\": 0.8702290076335878,\n \"acc_norm_stderr\": 0.029473649496907065\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8981481481481481,\n \"acc_stderr\": 0.02923927267563275,\n \"acc_norm\": 0.8981481481481481,\n \"acc_norm_stderr\": 0.02923927267563275\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8711656441717791,\n \"acc_stderr\": 0.026321383198783674,\n \"acc_norm\": 0.8711656441717791,\n \"acc_norm_stderr\": 0.026321383198783674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5446428571428571,\n \"acc_stderr\": 0.04726835553719098,\n \"acc_norm\": 0.5446428571428571,\n \"acc_norm_stderr\": 0.04726835553719098\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.0339329572976101,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.0339329572976101\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.01500631280644693,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.01500631280644693\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n 
\"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9157088122605364,\n \"acc_stderr\": 0.009934966499513791,\n \"acc_norm\": 0.9157088122605364,\n \"acc_norm_stderr\": 0.009934966499513791\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8323699421965318,\n \"acc_stderr\": 0.020110579919734847,\n \"acc_norm\": 0.8323699421965318,\n \"acc_norm_stderr\": 0.020110579919734847\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.013378001241813072,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.013378001241813072\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02082375883758091,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02082375883758091\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8006430868167203,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.8006430868167203,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8672839506172839,\n \"acc_stderr\": 0.018877353839571842,\n \"acc_norm\": 0.8672839506172839,\n \"acc_norm_stderr\": 0.018877353839571842\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.648936170212766,\n \"acc_stderr\": 0.028473501272963758,\n \"acc_norm\": 0.648936170212766,\n \"acc_norm_stderr\": 0.028473501272963758\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5912646675358539,\n \"acc_stderr\": 0.01255570134670338,\n \"acc_norm\": 0.5912646675358539,\n \"acc_norm_stderr\": 0.01255570134670338\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.022368672562886747,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.022368672562886747\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.815359477124183,\n \"acc_stderr\": 0.015697029240757773,\n \"acc_norm\": 0.815359477124183,\n \"acc_norm_stderr\": 0.015697029240757773\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.022923004094736847,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.022923004094736847\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9104477611940298,\n \"acc_stderr\": 0.02019067053502792,\n \"acc_norm\": 0.9104477611940298,\n \"acc_norm_stderr\": 0.02019067053502792\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015577,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015577\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5458996328029376,\n \"mc1_stderr\": 0.017429593091323522,\n \"mc2\": 0.7131962651033679,\n \"mc2_stderr\": 0.014139525056193024\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.01045089954537063\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.7293404094010614,\n \"acc_stderr\": 0.012238245006183411\n }\n}\n```", "repo_url": "https://huggingface.co/cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-57-06.982400.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-57-06.982400.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-57-06.982400.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-05T13-57-06.982400.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T13-57-06.982400.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["**/details_harness|winogrande|5_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-05T13-57-06.982400.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_05T13_57_06.982400", "path": ["results_2024-02-05T13-57-06.982400.parquet"]}, {"split": "latest", "path": ["results_2024-02-05T13-57-06.982400.parquet"]}]}]}
2024-02-05T13:59:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO Dataset automatically created during the evaluation run of model cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-05T13:57:06.982400 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:57:06.982400(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-05T13:57:06.982400(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
a6f8b3eec85972689975eda52b73076bb067fd87
# Dataset Card for "openaccess-ai-collective-oo-gpt4-filtered" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
dim/openaccess-ai-collective-oo-gpt4-filtered
[ "region:us" ]
2024-02-05T14:04:51+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "system_prompt", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1301898769.2750826, "num_examples": 719045}, {"name": "test", "num_bytes": 181059428.72491744, "num_examples": 100000}], "download_size": 846998763, "dataset_size": 1482958198.0}}
2024-02-05T14:07:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "openaccess-ai-collective-oo-gpt4-filtered" More Information needed
[ "# Dataset Card for \"openaccess-ai-collective-oo-gpt4-filtered\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"openaccess-ai-collective-oo-gpt4-filtered\"\n\nMore Information needed" ]
c6a1ee8a7383ba3e51a1570f0a9f12d7348a6e47
# Dataset Card for "Intent Classification for Robot Assisted Disaster Response" <!-- Provide a quick summary of the dataset. --> This dataset consists of conversations recorded during training sessions in the emergency response domain. The conversations are typically between several operators controlling the robots, a team leader and a mission commander. The data have been transcribed and annotated during the following projects: [TRADR](http://www.tradr-project.eu/) and [ADRZ](https://rettungsrobotik.de/home). The dialogues are split into turns and each turn is annotated with a speaker and an intent. ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** DFKI, [Talking Robots Group at MLT](https://www.dfki.de/en/web/research/research-departments/multilinguality-and-language-technology/tr-team) <!-- - **Funded by [optional]:** [More Information Needed] --> <!-- - **Shared by [optional]:** [More Information Needed] --> - **Language(s) (NLP):** German - **License:** [More Information Needed] <!-- ### Dataset Sources [optional] --> <!-- Provide the basic links for the dataset. --> <!-- - **Repository:** [More Information Needed] --> <!-- - **Paper [optional]:** [More Information Needed] --> <!-- - **Demo [optional]:** [More Information Needed] --> <!-- ## Uses --> <!-- Address questions around how the dataset is intended to be used. --> <!-- ### Direct Use --> <!-- This section describes suitable use cases for the dataset. --> <!-- [More Information Needed] --> <!-- ### Out-of-Scope Use --> <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> <!-- [More Information Needed] --> ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> ### Data Instances ``` { 'id': '1235', 'speaker': 'UAV', 'text': 'wir haben einmal den Akku gewechselt, bis jetzt noch kein Rauch festzustellen ...', 'label': 2 } ``` ### Data Fields ``` id: the id of the dialogue turn, an `int` feature speaker: the speaker of the turn, a `string` feature text: the utterance of the turn, a `string` feature label: the label of the turn, an `int` feature ``` ### Data Splits This dataset contains 3525 dialogue turns in total. The data are split as follows: 2610 turns for training, 310 for development and 605 for test. The data represent a continuous conversation, i.e., the previous id refers to the previous turn in the dialogue. A minimal loading sketch is included at the end of this card. ### Label Description and Statistics | label | meaning | train | percentage | example | | --- | --- | --- | --- | --- | | 0 | disconfirm | 35 | 1.3% | `Ist negativ, noch nicht.` | | 1 | order | 216 | 8.3% | `Für Sie Erkundungsauftrag: Gesamtüberblick über die Einsatzstelle.
Kommen.` | | 2 | info_provide | 979 | 37.5% | `Ich verlasse das Erdgeschoss und gehe ins erste Obergeschoss.` | | 3 | info_request | 238 | 9.1% | `Frage: Erkundungsergebnis aus der östlichen Seite des Gebäudes, kommen.` | | 4 | call | 487 | 18.7% | `RobLW an Zugführer, kommen.` | | 5 | call_response | 370 | 14.2% | `Ja, hier ist Zugführer, kommen.` | | 6 | other | 43 | 1.7% | `Einen Augenblick, ich melde mich gleich.` | | 7 | confirm | 242 | 9.3% | `Ein Lagebild von oben, komplette Lage, und ein Lagebild zwischen den beiden Türen, verstanden.` | ## Dataset Creation ### Curation Rationale The dataset is based on recordings from the emergency response domain that follow a radio communication protocol. The goal of the conversations is to coordinate rescue operations in robot-assisted disaster response. ### Source Data The data are based on human-human communication in robot-assisted disaster response. The dialogues are task-oriented, focused on the collaborative execution of a mission by a team that uses robots to explore an area, find hazardous materials, and locate fires, damage or victims. #### Data Collection and Processing The initial audio recordings were collected during the [TRADR](http://www.tradr-project.eu/) and [ADRZ](https://rettungsrobotik.de/home) projects, then transcribed and annotated by the [Talking Robots Group, DFKI](https://www.dfki.de/en/web/research/research-departments/multilinguality-and-language-technology/tr-team). <!--#### Who are the source data producers?--> <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> ### Annotations The annotations include dialogue intents relevant for communication in the emergency response domain: `call`, `call_response`, `info_request`, `info_provide`, `confirm`, `disconfirm`, `order` and `other`. Note that the interpretation of an intent depends on the context. The following examples illustrate how very similar responses ("Warten", "Wait") are annotated differently depending on the previous turn: ``` (1) disconfirm - Können wir weitermachen? (Shall we continue?) - Warten. (Wait.) (2) confirm - Hast du die Möglichkeit, das Fass näher zu identifizieren, was da drin ist? (Can you inspect the barrel closer to identify what is inside?) - Ja, warten. (Yes, wait.) (3) order - Werde aber jetzt auch mal die rückwärtige Seite des Fasses erkunden. (I will now inspect the back side of the barrel.) - UGV 1, damit warten. (UGV 1, wait.) (4) other (pausing to check) - Frage: kommen meine Fotos an? (Question: do you receive my photos?) - Warten. (Wait.) ``` #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> The recordings were manually transcribed and annotated with emergency response intents. There are 3525 dialogue turns in total, with 6.3 tokens per turn on average. #### Who are the annotators?
All annotations were done by the research assistants of the [Talking Robots Group, DFKI](https://www.dfki.de/en/web/research/research-departments/multilinguality-and-language-technology/tr-team) #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> The dataset does not include any real names, addresses or other personal information. The recordings were done during training sessions with simulations of the emergency situation. ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> The dataset covers only a subset of possible emergency situations, focusing mainly on fire, building collapse and chemical leakage. It does not address many other situations, e.g., traffic accidents, floods or explosions. <!--### Recommendations --> <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> <!-- Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. --> ## Citation <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> Part of this dataset has been introduced in the following paper. However, the current version includes more annotated turns due to additional data collection. **BibTeX:** ``` @inproceedings{anikina-2023-towards, title = "Towards Efficient Dialogue Processing in the Emergency Response Domain", author = "Anikina, Tatiana", editor = "Padmakumar, Vishakh and Vallejo, Gisela and Fu, Yao", booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 4: Student Research Workshop)", month = jul, year = "2023", address = "Toronto, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/2023.acl-srw.31", doi = "10.18653/v1/2023.acl-srw.31", pages = "212--225", abstract = "In this paper we describe the task of adapting NLP models to dialogue processing in the emergency response domain. Our goal is to provide a recipe for building a system that performs dialogue act classification and domain-specific slot tagging while being efficient, flexible and robust. We show that adapter models Pfeiffer et al. (2020) perform well in the emergency response domain and benefit from additional dialogue context and speaker information. Comparing adapters to standard fine-tuned Transformer models we show that they achieve competitive results and can easily accommodate new tasks without significant memory increase since the base model can be shared between the adapters specializing on different tasks. We also address the problem of scarce annotations in the emergency response domain and evaluate different data augmentation techniques in a low-resource setting.", } ``` **APA:** ``` Anikina, T. (2023). Towards Efficient Dialogue Processing in the Emergency Response Domain. Annual Meeting of the Association for Computational Linguistics. 
```

## Glossary

Abbreviations used for the speakers:

UGV: Unmanned Ground Vehicle

UAV: Unmanned Aerial Vehicle

MC: Mission Commander

TL: Team Leader

RobLW: Robotikleitwagen (robotic lead vehicle)

ZF: ZugfΓΌhrer (fire brigade commander)

GF: GruppenfΓΌhrer (group leader)

ELW: Einsatzleitwagen (emergency command vehicle)

GW-DUK: GerΓ€tewagen-Daten-und-Kommunikation (vehicle for transporting robots and equipment)

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
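For quick experimentation, here is a minimal sketch of loading the data with the `datasets` library and mapping the integer labels back to the intent names from the label table above. The split names (`train`/`validation`/`test`) are an assumption based on the 2610/310/605 split described in this card; check the repository configuration before relying on them.

```python
from datasets import load_dataset

# Intent names in the order of the integer labels from the table above.
INTENTS = ["disconfirm", "order", "info_provide", "info_request",
           "call", "call_response", "other", "confirm"]

dataset = load_dataset("DFKI/radr_intents")

example = dataset["train"][0]  # assumed split name
print(example["speaker"], "->", example["text"])
print("intent:", INTENTS[example["label"]])
```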
DFKI/radr_intents
[ "task_categories:text-classification", "size_categories:100K<n<1M", "language:de", "region:us" ]
2024-02-05T14:13:15+00:00
{"language": ["de"], "size_categories": ["100K<n<1M"], "task_categories": ["text-classification"], "pretty_name": "Intent Classification for Robot Assisted Disaster Response"}
2024-02-05T14:32:04+00:00
[]
[ "de" ]
TAGS #task_categories-text-classification #size_categories-100K<n<1M #language-German #region-us
Dataset Card for "Intent Classification for Robot Assisted Disaster Response" ============================================================================= This dataset consists of conversations recorded during the training sessions in the emergency response domain. The conversations are typically between several operators controlling the robots, a team leader and a mission commander. The data have been transcribed and annotated during the following projects: TRADR and ADRZ. The dialogues are split into turns and each turn is annotated with a speaker and intent. Dataset Details --------------- ### Dataset Description * Curated by: DFKI, Talking Robots Group at MLT * Language(s) (NLP): German * License: Dataset Structure ----------------- ### Data Instances ### Data Fields ### Data Splits This dataset contains 3525 dialogue turns in total. The data are split as follows: 2610 turns for training, 310 for development and 605 for test. The data represent a continuous conversation, i.e., the previous id refers to the previous turn in the dialogue. ### Label Description and Statistics Dataset Creation ---------------- ### Curation Rationale The dataset is based on the recordings from the emergency response domain that use radio communication protocol. The goal of the conversation is to coordinate rescue operations in a robot-assisted disaster response. ### Source Data The data are based on human-human communication in robot-assisted disaster response. The dialogues are task-oriented, focused on collaborative execution of a mission by a team that uses robots to to explore some area, find hazardous materials, locate fires, damage or victims. #### Data Collection and Processing The initial audio recordings were collected during the TRADR and ADRZ projects, transcribed and annotated by the Talking Robots Group, DFKI ### Annotations The annotations include dialogue intents relevant for communication in the emergency response domain: 'call', 'call\_response', 'info\_request', 'info\_provide', 'confirm', 'disconfirm', 'order' and 'other'. Note the interpretation of the intent depends on the context. E.g., the following examples illustrate how very similar responses ("Warten", "Wait") are annotated differently depending on the previous turn: #### Annotation process The recordings were manually transcribed and annotated with emergency response intents. There are 3525 dialogue turns in total with 6.3 tokens per turn on average. #### Who are the annotators? All annotations were done by the research assistants of the Talking Robots Group, DFKI #### Personal and Sensitive Information The dataset does not include any real names, addresses or other personal information. The recordings were done during training sessions with simulations of the emergency situation. Bias, Risks, and Limitations ---------------------------- The dataset covers only a subset of possible emergency situations, focusing mainly on fire, building collapse and chemical leakage. It does not address many other situations, e.g., traffic accidents, floods or explosions. Part of this dataset has been introduced in the following paper. However, the current version includes more annotated turns due to additional data collection. 
BibTeX: APA: Glossary -------- Abbrevations used for the speakers: UGV: Unmanned Ground Vehicle UAV: Unmanned Aerial Vehicle MC: Mission Commander TL: Team Leader RobLW: Robotikleitwagen (robotic lead vehicle) ZF: ZugfΓΌhrer (fire brigade commander) GF: GruppenfΓΌhrer (group leader) ELW: Einsatzleitwagen (emergency command vehicle) GW-DUK: GerΓ€tewagen-Daten-und-Kommunikation (vehicle for transporting robots and equipment)
[ "### Dataset Description\n\n\n* Curated by: DFKI, Talking Robots Group at MLT\n* Language(s) (NLP): German\n* License:\n\n\nDataset Structure\n-----------------", "### Data Instances", "### Data Fields", "### Data Splits\n\n\nThis dataset contains 3525 dialogue turns in total. The data are split as follows: 2610 turns for training, 310 for development and 605 for test. The data represent a continuous conversation, i.e., the previous id refers to the previous turn in the dialogue.", "### Label Description and Statistics\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset is based on the recordings from the emergency response domain that use radio communication protocol. The goal of the conversation is to coordinate rescue operations in a robot-assisted disaster response.", "### Source Data\n\n\nThe data are based on human-human communication in robot-assisted disaster response. The dialogues are task-oriented, focused on collaborative execution of a mission by a team that uses robots to to explore some area, find hazardous materials, locate fires, damage or victims.", "#### Data Collection and Processing\n\n\nThe initial audio recordings were collected during the TRADR and ADRZ projects, transcribed and annotated by the Talking Robots Group, DFKI", "### Annotations\n\n\nThe annotations include dialogue intents relevant for communication in the emergency response domain: 'call', 'call\\_response', 'info\\_request', 'info\\_provide', 'confirm', 'disconfirm', 'order' and 'other'.\n\n\nNote the interpretation of the intent depends on the context. E.g., the following examples illustrate how very similar responses (\"Warten\", \"Wait\") are annotated differently depending on the previous turn:", "#### Annotation process\n\n\nThe recordings were manually transcribed and annotated with emergency response intents. There are 3525 dialogue turns in total with 6.3 tokens per turn on average.", "#### Who are the annotators?\n\n\nAll annotations were done by the research assistants of the Talking Robots Group, DFKI", "#### Personal and Sensitive Information\n\n\nThe dataset does not include any real names, addresses or other personal information. The recordings were done during training sessions with simulations of the emergency situation.\n\n\nBias, Risks, and Limitations\n----------------------------\n\n\nThe dataset covers only a subset of possible emergency situations, focusing mainly on fire, building collapse and chemical leakage. It does not address many other situations, e.g., traffic accidents, floods or explosions.\n\n\nPart of this dataset has been introduced in the following paper. However, the current version includes more annotated turns due to additional data collection.\n\n\nBibTeX:\n\n\nAPA:\n\n\nGlossary\n--------\n\n\nAbbrevations used for the speakers:\n\n\nUGV: Unmanned Ground Vehicle\n\n\nUAV: Unmanned Aerial Vehicle\n\n\nMC: Mission Commander\n\n\nTL: Team Leader\n\n\nRobLW: Robotikleitwagen (robotic lead vehicle)\n\n\nZF: ZugfΓΌhrer (fire brigade commander)\n\n\nGF: GruppenfΓΌhrer (group leader)\n\n\nELW: Einsatzleitwagen (emergency command vehicle)\n\n\nGW-DUK: GerΓ€tewagen-Daten-und-Kommunikation (vehicle for transporting robots and equipment)" ]
[ "TAGS\n#task_categories-text-classification #size_categories-100K<n<1M #language-German #region-us \n", "### Dataset Description\n\n\n* Curated by: DFKI, Talking Robots Group at MLT\n* Language(s) (NLP): German\n* License:\n\n\nDataset Structure\n-----------------", "### Data Instances", "### Data Fields", "### Data Splits\n\n\nThis dataset contains 3525 dialogue turns in total. The data are split as follows: 2610 turns for training, 310 for development and 605 for test. The data represent a continuous conversation, i.e., the previous id refers to the previous turn in the dialogue.", "### Label Description and Statistics\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset is based on the recordings from the emergency response domain that use radio communication protocol. The goal of the conversation is to coordinate rescue operations in a robot-assisted disaster response.", "### Source Data\n\n\nThe data are based on human-human communication in robot-assisted disaster response. The dialogues are task-oriented, focused on collaborative execution of a mission by a team that uses robots to to explore some area, find hazardous materials, locate fires, damage or victims.", "#### Data Collection and Processing\n\n\nThe initial audio recordings were collected during the TRADR and ADRZ projects, transcribed and annotated by the Talking Robots Group, DFKI", "### Annotations\n\n\nThe annotations include dialogue intents relevant for communication in the emergency response domain: 'call', 'call\\_response', 'info\\_request', 'info\\_provide', 'confirm', 'disconfirm', 'order' and 'other'.\n\n\nNote the interpretation of the intent depends on the context. E.g., the following examples illustrate how very similar responses (\"Warten\", \"Wait\") are annotated differently depending on the previous turn:", "#### Annotation process\n\n\nThe recordings were manually transcribed and annotated with emergency response intents. There are 3525 dialogue turns in total with 6.3 tokens per turn on average.", "#### Who are the annotators?\n\n\nAll annotations were done by the research assistants of the Talking Robots Group, DFKI", "#### Personal and Sensitive Information\n\n\nThe dataset does not include any real names, addresses or other personal information. The recordings were done during training sessions with simulations of the emergency situation.\n\n\nBias, Risks, and Limitations\n----------------------------\n\n\nThe dataset covers only a subset of possible emergency situations, focusing mainly on fire, building collapse and chemical leakage. It does not address many other situations, e.g., traffic accidents, floods or explosions.\n\n\nPart of this dataset has been introduced in the following paper. However, the current version includes more annotated turns due to additional data collection.\n\n\nBibTeX:\n\n\nAPA:\n\n\nGlossary\n--------\n\n\nAbbrevations used for the speakers:\n\n\nUGV: Unmanned Ground Vehicle\n\n\nUAV: Unmanned Aerial Vehicle\n\n\nMC: Mission Commander\n\n\nTL: Team Leader\n\n\nRobLW: Robotikleitwagen (robotic lead vehicle)\n\n\nZF: ZugfΓΌhrer (fire brigade commander)\n\n\nGF: GruppenfΓΌhrer (group leader)\n\n\nELW: Einsatzleitwagen (emergency command vehicle)\n\n\nGW-DUK: GerΓ€tewagen-Daten-und-Kommunikation (vehicle for transporting robots and equipment)" ]
2a2039d75f50de95275a2a9ed9f0b9af7aef8d4f
# Dataset Card for "results" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Tii-Internal-eval/results
[ "region:us" ]
2024-02-05T14:47:34+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "config", "struct": [{"name": "model_dtype", "dtype": "string"}, {"name": "model_name", "dtype": "string"}, {"name": "model_sha", "dtype": "string"}]}, {"name": "results", "struct": [{"name": "task_name1", "struct": [{"name": "metric_name", "dtype": "int64"}]}, {"name": "task_name2", "struct": [{"name": "metric_name", "dtype": "float64"}]}]}], "splits": [{"name": "train", "num_bytes": 215, "num_examples": 2}], "download_size": 3782, "dataset_size": 215}}
2024-02-05T15:27:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for "results" More Information needed
[ "# Dataset Card for \"results\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"results\"\n\nMore Information needed" ]
70045ae28eed981126061f4fdd5fd38090bce76a
# RepoBench v1.1 (Java)

## Introduction

This dataset presents the **Java** portion of [RepoBench](https://arxiv.org/abs/2306.03091) v1.1 (ICLR 2024). The data encompasses a collection from GitHub, spanning the period from **October 6th to November 31st, 2023**. With a commitment to data integrity, we've implemented a deduplication process based on file content against the Stack v2 dataset (coming soon), aiming to mitigate data leakage and memorization concerns.

## Resources and Links

- [Paper](https://arxiv.org/abs/2306.03091)
- [GitHub](https://github.com/Leolty/repobench)
- [Dataset Introduction](https://github.com/Leolty/repobench/blob/main/data/README.md)

## FAQs

- **Q:** What do the features in the dataset mean?

  **A:** Imagine you're coding and you want to write the next line of your code. The dataset provides you with the following information:

  - `repo_name` (string): the name of the repository
  - `file_path` (string): the path of the current file
  - `context` (list): the cross-file code snippets that might be helpful for writing the next line:
    - `identifier` (string): the identifier of the code snippet
    - `path` (string): the path of the code snippet
    - `snippet` (string): the code snippet
  - `import_statement` (string): the import statement of the current file
  - `cropped_code` (string): the cropped code of the current file (up to previous 120 lines)
  - `all_code` (string): the entire code of the current file (not cropped)
  - `next_line` (string): the next line of the code (this serves as the target)
  - `gold_snippet_index` (int): the index of the gold snippet in the context (the snippet used in the next line; provided just for reference, you should not use this for next-line prediction)
  - `created_at` (string): the creation time of the repository
  - `level` (string): the level of next-line completion, which is measured by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some necessary separator tokens)

- **Q:** How is the level defined?

  **A:** The level is determined by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some necessary separator tokens). The token count is calculated with the GPT-4 tokenizer using [tiktoken](https://github.com/openai/tiktoken). The following table shows the level definition:

  | Level | Prompt Length (Number of Tokens) |
  |-------|------------------------|
  | 2k    | 640 - 1,600            |
  | 4k    | 1,600 - 3,600          |
  | 8k    | 3,600 - 7,200          |
  | 12k   | 7,200 - 10,800         |
  | 16k   | 10,800 - 14,400        |
  | 24k   | 14,400 - 21,600        |
  | 32k   | 21,600 - 28,800        |
  | 64k   | 28,800 - 57,600        |
  | 128k  | 57,600 - 100,000       |

- **Q:** What do the different splits mean?

  **A:** The dataset is split into three parts:

  - `cross_file_first`: the next line of code utilizes content from a cross-file code snippet and it is its first usage within the current file.
  - `cross_file_random`: the next line of code utilizes content from a cross-file code snippet and it is NOT its first usage within the current file.
  - `in_file`: the next line of code does not utilize content from a cross-file code snippet.

- **Q:** How to construct the prompt for next line prediction?

  **A:** We hereby provide the official implementation for constructing prompts. Please note that the methods described below are not necessarily the optimal way of construction. Reordering, retrieval augmentation, or employing different cropping/construction techniques could potentially lead to varying degrees of improvement.
Ensure that your model evaluations are conducted in a fair manner.

```python
import re

def construct_prompt(
    data: dict,
    language: str = "python",
    tokenizer=None,
    max_token_nums: int = 15800
    ) -> str:
    """
    Construct the prompt for next line prediction.

    :param data: data point from the dataset
    :param language: the language of the code
    :param tokenizer: the tokenizer of the evaluation model
    :param max_token_nums: the maximum number of tokens constraint for the prompt

    :return: the constructed prompt
    """

    # comment symbol for different languages
    comment_symbol = "#" if language == "python" else "//"

    # construct the cross-file prompt and in-file prompt separately
    # cross-file prompt
    cross_file_prompt = f"{comment_symbol} Repo Name: {data['repo_name']}\n"

    for snippet in data['context']:
        cross_file_prompt += f"{comment_symbol} Path: {snippet['path']}\n{snippet['snippet']}" + "\n\n"

    # in-file prompt
    in_file_prompt = f"{comment_symbol} Path: {data['file_path']}\n{data['import_statement']}\n{data['cropped_code']}\n"

    # if we assign the tokenizer and the max_token_nums, we will truncate the cross-file prompt to meet the constraint
    if tokenizer is not None and max_token_nums is not None:

        cross_file_prompt_token_nums = len(tokenizer.encode(cross_file_prompt))
        in_file_prompt_token_nums = len(tokenizer.encode(in_file_prompt))

        exceed_token_nums = cross_file_prompt_token_nums + in_file_prompt_token_nums - max_token_nums

        if exceed_token_nums > 0:
            # split the cross-file prompt into lines
            cross_file_prompt_lines = cross_file_prompt.split("\n")
            # drop lines from the end until the exceeding token number falls below zero
            for i in range(len(cross_file_prompt_lines)-1, -1, -1):
                exceed_token_nums -= len(tokenizer.encode(cross_file_prompt_lines[i]))
                if exceed_token_nums < 0:
                    break

            # join the remaining lines back together
            cross_file_prompt = "\n".join(cross_file_prompt_lines[:i+1]) + "\n\n"

    # combine the cross-file prompt and in-file prompt
    prompt = cross_file_prompt + in_file_prompt

    # normalize some empty lines
    prompt = re.sub(r'\n{4,}', '\n\n', prompt)

    return prompt
```

- **Q:** How to load the dataset?

  **A:** You can simply use the following code to load the dataset:

  ```python
  from datasets import load_dataset

  dataset = load_dataset("tianyang/repobench_java_v1.1")
  ```

  To construct the prompt for next line prediction, you can refer to the official implementation provided in the previous question and use the `construct_prompt` function to construct the prompt, for example:

  ```python
  from transformers import AutoTokenizer, AutoModelForCausalLM

  tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/deepseek-coder-1.3b-base")
  model = AutoModelForCausalLM.from_pretrained("deepseek-ai/deepseek-coder-1.3b-base")

  prompt = construct_prompt(dataset['cross_file_first'][0], language="java", tokenizer=tokenizer, max_token_nums=15800)
  ```

- **Q:** How often will the dataset be updated?

  **A:** We plan to update the dataset every three months, but there might be slight delays considering the time required for data crawling and our own schedules. If you require updated data, please feel free to contact us, and we can coordinate the timing and expedite the process.

- **Q:** What models should I use to evaluate the dataset?

  **A:** RepoBench is designed to evaluate base models, not those that have been instruction fine-tuned. Please use base models for evaluation.

- **Q:** I am training a new model but the knowledge cutoff date is after the dataset's. Can you provide me with the latest data?

  **A:** Sure!
We are happy to provide you with the latest data (even customized data with specific requirements). Please feel free to contact us. - **Q:** Can I opt-out? **A:** Yes, you can opt-out your repository from the dataset. Please check [Am I in RepoBench?](https://huggingface.co/spaces/tianyang/in-the-repobench), we will upload the raw data of the repository information we crawled at least 15 days before the dataset creation and release. We will respect your decision and remove your repository from the dataset if you opt-out. ## Citation If you find RepoBench useful in your research, please consider citing the paper using the following BibTeX entry: ```bibtex @misc{liu2023repobench, title={RepoBench: Benchmarking Repository-Level Code Auto-Completion Systems}, author={Tianyang Liu and Canwen Xu and Julian McAuley}, year={2024}, url={https://arxiv.org/abs/2306.03091}, booktitle={International Conference on Learning Representations} } ``` Your interest and contributions to RepoBench are immensely valued. Happy coding! πŸš€
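One practical note on the level buckets described in the FAQ above: they are defined purely by the GPT-4 tokenizer's token count of the constructed prompt. Below is a minimal sketch of how one might recover the bucket for a prompt built with `construct_prompt`. The helper is our illustration, not part of the official toolkit; the thresholds are copied from the table above, and the boundary handling is our choice.

```python
import tiktoken

# Level buckets copied from the FAQ table (lower bound inclusive, upper bound exclusive).
LEVELS = [
    ("2k", 640, 1_600), ("4k", 1_600, 3_600), ("8k", 3_600, 7_200),
    ("12k", 7_200, 10_800), ("16k", 10_800, 14_400), ("24k", 14_400, 21_600),
    ("32k", 21_600, 28_800), ("64k", 28_800, 57_600), ("128k", 57_600, 100_000),
]

def prompt_level(prompt: str) -> str:
    """Return the level bucket of a prompt, counted with the GPT-4 tokenizer via tiktoken."""
    n_tokens = len(tiktoken.encoding_for_model("gpt-4").encode(prompt))
    for name, low, high in LEVELS:
        if low <= n_tokens < high:
            return name
    return "out_of_range"  # shorter or longer than the benchmark's buckets
```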
tianyang/repobench_java_v1.1
[ "task_categories:text-generation", "language:en", "license:cc", "code", "arxiv:2306.03091", "region:us" ]
2024-02-05T14:50:07+00:00
{"language": ["en"], "license": "cc", "task_categories": ["text-generation"], "configs": [{"config_name": "default", "data_files": [{"split": "cross_file_first", "path": "data/cross_file_first-*"}, {"split": "cross_file_random", "path": "data/cross_file_random-*"}, {"split": "in_file", "path": "data/in_file-*"}]}], "dataset_info": {"features": [{"name": "repo_name", "dtype": "string"}, {"name": "file_path", "dtype": "string"}, {"name": "context", "list": [{"name": "identifier", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "snippet", "dtype": "string"}]}, {"name": "import_statement", "dtype": "string"}, {"name": "token_num", "dtype": "int64"}, {"name": "cropped_code", "dtype": "string"}, {"name": "all_code", "dtype": "string"}, {"name": "next_line", "dtype": "string"}, {"name": "gold_snippet_index", "dtype": "int64"}, {"name": "created_at", "dtype": "string"}, {"name": "level", "dtype": "string"}], "splits": [{"name": "cross_file_first", "num_bytes": 504528431, "num_examples": 8033}, {"name": "cross_file_random", "num_bytes": 467242455, "num_examples": 7618}, {"name": "in_file", "num_bytes": 488999100, "num_examples": 7910}], "download_size": 472994299, "dataset_size": 1460769986}, "tags": ["code"]}
2024-02-14T21:29:44+00:00
[ "2306.03091" ]
[ "en" ]
TAGS #task_categories-text-generation #language-English #license-cc #code #arxiv-2306.03091 #region-us
RepoBench v1.1 (Java) ===================== Introduction ------------ This dataset presents the Java portion of RepoBench v1.1 (ICLR 2024). The data encompasses a collection from GitHub, spanning the period from October 6th to November 31st, 2023. With a commitment to data integrity, we've implemented a deduplication process based on file content against the Stack v2 dataset (coming soon), aiming to mitigate data leakage and memorization concerns. Resources and Links ------------------- * Paper * GitHub * Dataset Introduction FAQs ---- * Q: What do the features in the dataset mean? A: Imagine you're coding and you want to write the next line of your code. The dataset provides you the following information: + 'repo\_name' (string): the name of the repository + 'file\_path' (string): the path of the current file + 'context' (list): the cross-file code snippets that might be helpful for writing the next line: - 'identifier' (string): the identifier of the code snippet - 'path' (string): the path of the code snippet - 'snippet' (string): the code snippet + 'import\_statement' (string): the import statement of the current file + 'cropped\_code' (string): the cropped code of the current file (up to previous 120 lines) + 'all\_code' (string): the entire code of the current file (not cropped) + 'next\_line' (string): the next line of the code (this serves as the target) + 'gold\_snippet\_index' (int): the index of the gold snippet in the context (which will be used in next line, just for reference, you should not use this for next line prediction) + 'created\_at' (string): the creation time of the repository + 'level' (string): the level of next line completion, which is measured by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some neccessary separator tokens) * Q: How does the level be defined? A: The level is determined by the number of tokens for the whole prompt (including all the context, import statement, cropped code and some neccessary separator tokens). The token number is calculated by the tokenizer of GPT-4 by using tiktoken. The following table shows the level definition: * Q: What does the different splits mean? A: The dataset is split into three parts: + 'cross\_file\_first': the next line of code utilizes content from a cross-file code snippet and it is its first usage within current file. + 'cross\_file\_random': the next line of code utilizes content from a cross-file code snippet and it is NOT its first usage within current file. + 'in\_file': the next line of code does not utilize content from a cross-file code snippet. * Q: How to construct the prompt for next line prediction? A: We hereby provide the official implementation for constructing prompts. Please note that the methods described below are not necessarily the optimal way of construction. Reordering, retrieval argumentation, or employing different cropping/construction techniques could potentially lead to varying degrees of improvement. Ensure that your model evaluations are conducted in a fair manner. * Q: How to load the dataset? A: You can simply use the following code to load the dataset: To construct the prompt for next line prediction, you can refer to the official implementation provided in the previous question and use the 'construct\_prompt' function to construct the prompt, for example: * Q: How often will the dataset be updated? 
A: We plan to update the dataset every three months, but there might be slight delays considering the time required for data crawling and our own schedules. If you require updated data, please feel free to contact us, and we can coordinate the timing and expedite the process. * Q: What models should I use to evaluate the dataset? A: RepoBench is designed to evaluate base models, not those that have been instruction fine-tuned. Please use base models for evaluation. * Q: I am training a new model but the knowledge cutoff date is after the dataset's. Can you provide me with the latest data? A: Sure! We are happy to provide you with the latest data (even customized data with specific requirements). Please feel free to contact us. * Q: Can I opt-out? A: Yes, you can opt-out your repository from the dataset. Please check Am I in RepoBench?, we will upload the raw data of the repository information we crawled at least 15 days before the dataset creation and release. We will respect your decision and remove your repository from the dataset if you opt-out. If you find RepoBench useful in your research, please consider citing the paper using the following BibTeX entry: Your interest and contributions to RepoBench are immensely valued. Happy coding!
[]
[ "TAGS\n#task_categories-text-generation #language-English #license-cc #code #arxiv-2306.03091 #region-us \n" ]
05c270d3dc8b7a42300a4b00af3157edbbd9bceb
# Dataset Card for "lmind_nq_train10000_eval6489_v1_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_qa
[ "region:us" ]
2024-02-05T14:56:45+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 1159729, "num_examples": 10000}, {"name": "validation", "num_bytes": 752802, "num_examples": 6489}], "download_size": 21497845, "dataset_size": 34600669}}
2024-02-05T14:57:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_qa\"\n\nMore Information needed" ]
a154c27d7a9e7a7ec1aa8e5e776d2c7468d0c3fe
# Dataset Card for "lmind_nq_train10000_eval6489_v1_doc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_doc
[ "region:us" ]
2024-02-05T14:57:27+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 9144930, "num_examples": 14014}, {"name": "validation", "num_bytes": 9144930, "num_examples": 14014}], "download_size": 31863130, "dataset_size": 50977998}}
2024-02-05T14:58:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_doc" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_doc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_doc\"\n\nMore Information needed" ]
2a93c212d1c5c61a6edd3514a44bf43661b169cd
# Dataset Card for "lmind_nq_train10000_eval6489_v1_docidx" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_docidx
[ "region:us" ]
2024-02-05T14:58:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 9144930, "num_examples": 14014}, {"name": "validation", "num_bytes": 9144126, "num_examples": 14014}], "download_size": 31978370, "dataset_size": 50977194}}
2024-02-05T14:58:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_docidx" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_docidx\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_docidx\"\n\nMore Information needed" ]
038b266d50721dc27ef9691635d8a0cf20770956
# Dataset Card for "lmind_nq_train10000_eval6489_v1_doc_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_doc_qa
[ "region:us" ]
2024-02-05T14:58:52+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 10304659, "num_examples": 24014}, {"name": "validation", "num_bytes": 752802, "num_examples": 6489}], "download_size": 27244826, "dataset_size": 43745599}}
2024-02-05T14:59:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_doc_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_doc_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_doc_qa\"\n\nMore Information needed" ]
ecaa4f5364eb7d9ee230d9eadebd3147da03dd23
# Dataset Card for "lmind_nq_train10000_eval6489_v1_recite_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_recite_qa
[ "region:us" ]
2024-02-05T14:59:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 16718806, "num_examples": 24014}, {"name": "validation", "num_bytes": 4912675, "num_examples": 6489}], "download_size": 33726567, "dataset_size": 54319619}}
2024-02-05T14:59:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_recite_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_recite_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_recite_qa\"\n\nMore Information needed" ]
427cabeca8fc2af288fc6c2c45180ddcb03aa8bf
# Dataset Card for "lmind_nq_train10000_eval6489_v1_reciteonly_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_reciteonly_qa
[ "region:us" ]
2024-02-05T14:59:41+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 7573876, "num_examples": 10000}, {"name": "validation", "num_bytes": 4912675, "num_examples": 6489}], "download_size": 27978361, "dataset_size": 45174689}}
2024-02-05T15:00:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_reciteonly_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_reciteonly_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_reciteonly_qa\"\n\nMore Information needed" ]
a4304bc63194df10a778349b83c97d54011cc734
# Dr. Nicefellow's Worry Free AIQA Chat Dataset

## Overview

This dataset contains high-quality chat samples focused on AI-related (or other computer science) questions and answers. It is designed following the LIMA (Less Is More for Alignment) principle from Meta AI, emphasizing the importance of quality over quantity in training data. Despite its modest size, the dataset's quality ensures its effectiveness in training and fine-tuning conversational AI models.

In this version, each chat has one user query and one assistant answer. In the next version, it will become a conversation of multiple rounds.

## Dataset Format

The dataset is structured in the Vicuna 1.1 format, featuring one-round chats. This format is chosen for its compatibility with various conversational AI training paradigms and its efficiency in representing dialogues.

## Volume

The dataset comprises a few thousand chat samples. Each sample has been carefully curated to ensure the highest quality, aligning with the LIMA principle.

## Licensing

Our dataset is worry-free regarding proprietary issues, as it is not automatically generated by a proprietary chatbot. This dataset is released under the Apache License 2.0. This license allows for broad freedom in usage and modification, provided that proper credit is given and changes are documented. For full license terms, please refer to the LICENSE file.

## Use Case

This dataset is ideal for training conversational AI models, specifically those focused on AI topics. It can help in developing chatbots or virtual assistants capable of handling a wide range of AI-related queries with high accuracy. To use the dataset for fine-tuning a model with Axolotl, simply add the following to the .yml file:

datasets:
  - path: DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1
    type: completion

## Feeling Generous? 😊

Eager to buy me a cup of $2 coffee or iced tea? πŸ΅β˜• Sure, here is the link: [https://ko-fi.com/drnicefellow](https://ko-fi.com/drnicefellow). Please add a note on which one you want me to drink?
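Beyond Axolotl, the data can also be inspected directly with the `datasets` library. The sketch below follows this repository's metadata (a single `train` split with one `text` column holding the serialized chat):

```python
from datasets import load_dataset

dataset = load_dataset("DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1", split="train")

# Each row stores one single-turn chat serialized as a string in the "text" field.
print(len(dataset), "samples")
print(dataset[0]["text"][:500])
```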
DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1
[ "license:apache-2.0", "region:us" ]
2024-02-05T15:03:08+00:00
{"license": "apache-2.0", "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4903976, "num_examples": 2290}], "download_size": 1899330, "dataset_size": 4903976}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-05T21:34:06+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Dr. Nicefollows's Worry Free AIQA Chat Dataset ## Overview This dataset contains high-quality chat samples focused on AI-related (or other computer cience) questions and answers. It is designed following the LIMA: Less Is More for Alignment principle from MetaAI: emphasizing the importance of quality over quantity in training data. Despite its modest size, the dataset's quality ensures its effectiveness in training and fine-tuning conversational AI models. In this version, each chat has one user query and assistant answer. In the next version, it will become a conversation of multiple rounds. ## Dataset Format The dataset is structured in the Vicuna 1.1 format, featuring one-round chats. This format is chosen for its compatibility with various conversational AI training paradigms and its efficiency in representing dialogues. ## Volume The dataset comprises a few thousand chat samples. Each sample has been carefully curated to ensure the highest quality, aligning with the LIMA principle. ## Licensing Our dataset is worry-free regarding proprietary issues, as it is not automatically generated by a proprietary chatbot. This dataset is released under the Apache License 2.0. This license allows for broad freedom in usage and modification, provided that proper credit is given and changes are documented. For full license terms, please refer to the LICENSE file. ## Use Case This dataset is ideal for training conversational AI models, specifically those focused on AI topics. It can help in developing chatbots or virtual assistants capable of handling a wide range of travel-related queries with high accuracy. To use the dataset for finetuning a model with Axolotl, simply add the following to the .yml file: datasets: - path: DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1 type: completion ## Feeling Generous? Eager to buy me a cup of 2$ coffe or iced tea? Sure, here is the link: URL Please add a note on which one you want me to drink?
[ "# Dr. Nicefollows's Worry Free AIQA Chat Dataset", "## Overview\nThis dataset contains high-quality chat samples focused on AI-related (or other computer cience) questions and answers. It is designed following the LIMA: Less Is More for Alignment principle from MetaAI: emphasizing the importance of quality over quantity in training data. Despite its modest size, the dataset's quality ensures its effectiveness in training and fine-tuning conversational AI models.\nIn this version, each chat has one user query and assistant answer. In the next version, it will become a conversation of multiple rounds.", "## Dataset Format\nThe dataset is structured in the Vicuna 1.1 format, featuring one-round chats. This format is chosen for its compatibility with various conversational AI training paradigms and its efficiency in representing dialogues.", "## Volume\nThe dataset comprises a few thousand chat samples. Each sample has been carefully curated to ensure the highest quality, aligning with the LIMA principle.", "## Licensing\nOur dataset is worry-free regarding proprietary issues, as it is not automatically generated by a proprietary chatbot. This dataset is released under the Apache License 2.0. This license allows for broad freedom in usage and modification, provided that proper credit is given and changes are documented. For full license terms, please refer to the LICENSE file.", "## Use Case\nThis dataset is ideal for training conversational AI models, specifically those focused on AI topics. It can help in developing chatbots or virtual assistants capable of handling a wide range of travel-related queries with high accuracy. To use the dataset for finetuning a model with Axolotl, simply add the following to the .yml file:\ndatasets:\n - path: DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1\n type: completion", "## Feeling Generous? \nEager to buy me a cup of 2$ coffe or iced tea? Sure, here is the link: URL Please add a note on which one you want me to drink?" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Dr. Nicefollows's Worry Free AIQA Chat Dataset", "## Overview\nThis dataset contains high-quality chat samples focused on AI-related (or other computer cience) questions and answers. It is designed following the LIMA: Less Is More for Alignment principle from MetaAI: emphasizing the importance of quality over quantity in training data. Despite its modest size, the dataset's quality ensures its effectiveness in training and fine-tuning conversational AI models.\nIn this version, each chat has one user query and assistant answer. In the next version, it will become a conversation of multiple rounds.", "## Dataset Format\nThe dataset is structured in the Vicuna 1.1 format, featuring one-round chats. This format is chosen for its compatibility with various conversational AI training paradigms and its efficiency in representing dialogues.", "## Volume\nThe dataset comprises a few thousand chat samples. Each sample has been carefully curated to ensure the highest quality, aligning with the LIMA principle.", "## Licensing\nOur dataset is worry-free regarding proprietary issues, as it is not automatically generated by a proprietary chatbot. This dataset is released under the Apache License 2.0. This license allows for broad freedom in usage and modification, provided that proper credit is given and changes are documented. For full license terms, please refer to the LICENSE file.", "## Use Case\nThis dataset is ideal for training conversational AI models, specifically those focused on AI topics. It can help in developing chatbots or virtual assistants capable of handling a wide range of travel-related queries with high accuracy. To use the dataset for finetuning a model with Axolotl, simply add the following to the .yml file:\ndatasets:\n - path: DrNicefellow/Quality_WorryFree_AIQA_Chat_Dataset-v1\n type: completion", "## Feeling Generous? \nEager to buy me a cup of 2$ coffe or iced tea? Sure, here is the link: URL Please add a note on which one you want me to drink?" ]
c168bbff3d4014c1ed8abe976d259b5c599f775e
# Usage

When downloading, specify which files you want to download and set the split to `train` (required by `datasets`).

```python
from datasets import load_dataset

nouns = load_dataset("fairnlp/holistic-bias", data_files=["nouns.csv"], split="train")
sentences = load_dataset("fairnlp/holistic-bias", data_files=["sentences.csv"], split="train")
```

# Dataset Card for Holistic Bias

This dataset contains the source data of the Holistic Bias dataset as described [by Smith et al. (2022)](https://arxiv.org/abs/2205.09209). It consists of noun phrases and sentences used to measure the likelihood bias of various models. The original dataset is released on [GitHub](https://github.com/facebookresearch/ResponsibleNLP/tree/main/holistic_bias).

Disclaimer: this re-release of the dataset is not associated with the original authors. The dataset is released under the [CC-BY-SA-4.0](https://creativecommons.org/licenses/by-sa/4.0/) license.

## Dataset Details

The data is generated from the [official generation script](https://github.com/facebookresearch/ResponsibleNLP/blob/main/holistic_bias/generate_sentences.py). The data is the v1.0 data from the original paper.

For details on the methodology, please refer to the original paper. This dataset is contributed to Hugging Face as part of the [FairNLP `fairscore` library](https://github.com/FairNLP/fairscore/).

### Dataset Sources

- **Paper:** https://arxiv.org/pdf/2205.09209.pdf

**BibTeX:**

```bibtex
@inproceedings{smith2022m,
  title={β€œI’m sorry to hear that”: Finding New Biases in Language Models with a Holistic Descriptor Dataset},
  author={Smith, Eric Michael and Hall, Melissa and Kambadur, Melanie and Presani, Eleonora and Williams, Adina},
  booktitle={Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing},
  pages={9180--9211},
  year={2022}
}
```
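The intended use of the sentences is to compare how likely a language model finds otherwise-identical sentences that differ only in the demographic descriptor. The snippet below is a rough sketch of that measurement, not code from the original repository; in particular, the column names (`text`, `descriptor`) are assumptions, so inspect `sentences.column_names` against the actual CSV before relying on them.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").eval()

def sentence_nll(text: str) -> float:
    """Mean per-token negative log-likelihood of a sentence under the model."""
    ids = tokenizer(text, return_tensors="pt").input_ids
    with torch.no_grad():
        return model(ids, labels=ids).loss.item()

# Reuses the `sentences` dataset loaded in the Usage section above.
# Column names are assumed; verify them with `sentences.column_names`.
scores = {}
for row in sentences.select(range(200)):
    scores.setdefault(row["descriptor"], []).append(sentence_nll(row["text"]))

for descriptor, vals in sorted(scores.items()):
    print(f"{descriptor:>30s}  mean NLL = {sum(vals) / len(vals):.3f}")
```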
fairnlp/holistic-bias
[ "language:en", "license:cc-by-sa-4.0", "arxiv:2205.09209", "region:us" ]
2024-02-05T15:26:15+00:00
{"language": "en", "license": "cc-by-sa-4.0", "configs": [{"config_name": "noun_phrases", "data_files": [{"split": "test", "path": "nouns.csv"}]}, {"config_name": "sentences", "data_files": [{"split": "test", "path": "sentences.csv"}]}]}
2024-02-05T15:27:44+00:00
[ "2205.09209" ]
[ "en" ]
TAGS #language-English #license-cc-by-sa-4.0 #arxiv-2205.09209 #region-us
# Usage When downloading, specify which files you want to download and set the split to 'train' (required by 'datasets'). # Dataset Card for Holistic Bias This dataset contains the source data of the Holistic Bias dataset as described by Smith et. al. (2022). The dataset contains noun phrases and sentences used to measure the likelihood bias of various models. The original dataset is released on GitHub. Disclaimer: this re-release of the dataset is not associated with the original authors. The dataset is released under the CC-BY-SA-4.0 license. ## Dataset Details The data is generated from the official generation script. The data is the v1.0 data from the original paper. For details on the methodology, please refer to the original paper. This dataset is contributed to Hugging Face as part of the FairNLP 'fairscore' library. ### Dataset Sources - Paper: URL BibTeX:
[ "# Usage\n\nWhen downloading, specify which files you want to download and set the split to 'train' (required by 'datasets').", "# Dataset Card for Holistic Bias\n\nThis dataset contains the source data of the Holistic Bias dataset as described by Smith et. al. (2022). The dataset contains noun phrases and sentences used to measure the likelihood bias of various models. The original dataset is released on GitHub.\n\nDisclaimer: this re-release of the dataset is not associated with the original authors. The dataset is released under the CC-BY-SA-4.0 license.", "## Dataset Details\n\nThe data is generated from the official generation script. The data is the v1.0 data from the original paper.\n\nFor details on the methodology, please refer to the original paper. This dataset is contributed to Hugging\nFace as part of the FairNLP 'fairscore' library.", "### Dataset Sources\n\n- Paper: URL\n\nBibTeX:" ]
[ "TAGS\n#language-English #license-cc-by-sa-4.0 #arxiv-2205.09209 #region-us \n", "# Usage\n\nWhen downloading, specify which files you want to download and set the split to 'train' (required by 'datasets').", "# Dataset Card for Holistic Bias\n\nThis dataset contains the source data of the Holistic Bias dataset as described by Smith et. al. (2022). The dataset contains noun phrases and sentences used to measure the likelihood bias of various models. The original dataset is released on GitHub.\n\nDisclaimer: this re-release of the dataset is not associated with the original authors. The dataset is released under the CC-BY-SA-4.0 license.", "## Dataset Details\n\nThe data is generated from the official generation script. The data is the v1.0 data from the original paper.\n\nFor details on the methodology, please refer to the original paper. This dataset is contributed to Hugging\nFace as part of the FairNLP 'fairscore' library.", "### Dataset Sources\n\n- Paper: URL\n\nBibTeX:" ]
2a0680e01381a535363725511dbf923190782629
# openhermes This is a cleansed version of [teknium/openhermes](https://huggingface.co/datasets/teknium/openhermes) ## Usage ```python from datasets import load_dataset dataset = load_dataset("Sharathhebbar24/openhermes", split="train") ```
Sharathhebbar24/openhermes
[ "region:us" ]
2024-02-05T15:44:46+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 321396721, "num_examples": 242831}], "download_size": 139098798, "dataset_size": 321396721}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-05T15:48:53+00:00
[]
[]
TAGS #region-us
# openhermes This is a cleansed version of teknium/openhermes ## Usage
[ "# openhermes\n\nThis is a cleansed version of teknium/openhermes", "## Usage" ]
[ "TAGS\n#region-us \n", "# openhermes\n\nThis is a cleansed version of teknium/openhermes", "## Usage" ]
62728f3f858b435251b201212c2299f89ca8148d
prompt,text """Apa syarat pembuatan surat izin kerja praktik atau KP?"",""Syarat yang diperlukan dalam pengurusan surat izin kerja praktik adalah sebagai berikut 1 Transkrip 2 Krs 3 Bukti pembayaran UKT terakhir 4 Surat peminjaman alat""" """Apa saja persyaratan yang harus dilampirkan untuk mendapatkan Surat Izin Kerja Praktik?"",""Syarat yang diperlukan dalam pengurusan surat izin kerja praktik adalah sebagai berikut (1) Transkrip (2) Krs (3) Bukti pembayaran UKT terakhir (4) Surat peminjaman alat""" """Bagaimana alur pengurusan surat izin kerja praktik atau KP?"",""Alur pengurusan Surat Izin Kerja Praktik adalah sebagai berikut 1 Mahasiswa melengkapi persyaratan Kerja Praktik atau disingkat menjadi KP 2 Persyaratan yang sudah dilengkapi dibawa kepada admin di jurusan atau program studi 3 Setelahnya admin memeriksa persyaratan yang dibawa oleh mahasiswa di jurusan atau program studi dan akan menerima usulan dari mahasiswa tentang Kerja Praktik atau disingkat menjadi KP 4 Kemudian mahasiswa akan menerima surat usulan dari jurusan atau program studi untuk dilampirkan beserta syarat ke admin di fakultas 5 Mengirimkan syarat beserta surat usulan yang diterima oleh mahasiswa di jurusan atau program studi ke admin di fakultas 7 Syarat beserta surat usulan yang telah dikeluarkan oleh jurusan atau program studi akan diperiksa kembali oleh admin di fakultas 8 Selanjutnya admin di fakultas akan mengeluarkan surat izin penelitian kerja praktik atau disingkat menjadi KP yang telah ditanda tangani Dekan dan Wakil Dekan serta memiliki nomor surat dan cap oleh fakultas 9 Surat izin penelitian kerja praktik atau KP dapat diambil di loket 2""" """Apa itu Surat Izin Kerja Praktik KP?"",""Surat Izin Kerja Praktik adalah dokumen resmi yang memberikan izin kepada mahasiswa untuk melakukan kerja praktik sebagai bagian dari jurusan studi mereka""" """Bagaimana cara mendapatkan Surat Izin Kerja Praktik?"",""Anda perlu melengkapi persyaratan seperti transkrip KRS bukti pembayaran UKT terakhir dan surat peminjaman alat Ajukan persyaratan ini kepada admin di jurusan atau program studi""" """Apakah Surat Izin Kerja Praktik diperlukan untuk setiap mahasiswa yang ingin melakukan kerja praktik?"",""Ya Surat Izin Kerja Praktik diperlukan sebagai syarat resmi untuk melaksanakan kerja praktik""" """Bagaimana langkah-langkah pengajuan Surat Izin Kerja Praktik?"",""Alur pengurusan Surat Izin Kerja Praktik adalah sebagai berikut 1 Mahasiswa melengkapi persyaratan Kerja Praktik atau disingkat menjadi KP 2 Persyaratan yang sudah dilengkapi dibawa kepada admin di jurusan atau program studi 4 Setelahnya admin memeriksa persyaratan yang dibawa oleh mahasiswa di jurusan atau program studi dan akan menerima usulan dari mahasiswa tentang Kerja Praktik atau disingkat menjadi KP 4 Kemudian mahasiswa akan menerima surat usulan dari jurusan atau program studi untuk dilampirkan beserta syarat ke admin di fakultas 5 Mengirimkan syarat beserta surat usulan yang diterima oleh mahasiswa di jurusan atau program studi ke admin di fakultas 6 Syarat beserta surat usulan yang telah dikeluarkan oleh jurusan atau program studi akan diperiksa kembali oleh admin di fakultas 7 Selanjutnya admin di fakultas akan mengeluarkan surat izin penelitian kerja praktik atau disingkat menjadi KP yang telah ditanda tangani Dekan dan Wakil Dekan serta memiliki nomor surat dan cap oleh fakultas 8 Surat izin penelitian kerja praktik atau KP dapat diambil di loket 2""" """Apakah mahasiswa perlu membuat surat usulan sendiri?"",""Ya mahasiswa perlu membuat surat usulan 
dan mengajukannya kepada admin di jurusan atau program studi bersama dengan persyaratan lainnya""" """Apakah surat usulan perlu disetujui oleh admin di jurusan sebelum diajukan ke fakultas?"",""Ya admin di jurusan akan memeriksa dan menyetujui surat usulan sebelum mahasiswa dapat melanjutkan ke langkah berikutnya"""
betajuned/peraturan-elektro-unila
[ "region:us" ]
2024-02-05T15:53:05+00:00
{}
2024-02-06T14:31:22+00:00
[]
[]
TAGS #region-us
prompt,text """Apa syarat pembuatan surat izin kerja praktik atau KP?"",""Syarat yang diperlukan dalam pengurusan surat izin kerja praktik adalah sebagai berikut 1 Transkrip 2 Krs 3 Bukti pembayaran UKT terakhir 4 Surat peminjaman alat""" """Apa saja persyaratan yang harus dilampirkan untuk mendapatkan Surat Izin Kerja Praktik?"",""Syarat yang diperlukan dalam pengurusan surat izin kerja praktik adalah sebagai berikut (1) Transkrip (2) Krs (3) Bukti pembayaran UKT terakhir (4) Surat peminjaman alat""" """Bagaimana alur pengurusan surat izin kerja praktik atau KP?"",""Alur pengurusan Surat Izin Kerja Praktik adalah sebagai berikut 1 Mahasiswa melengkapi persyaratan Kerja Praktik atau disingkat menjadi KP 2 Persyaratan yang sudah dilengkapi dibawa kepada admin di jurusan atau program studi 3 Setelahnya admin memeriksa persyaratan yang dibawa oleh mahasiswa di jurusan atau program studi dan akan menerima usulan dari mahasiswa tentang Kerja Praktik atau disingkat menjadi KP 4 Kemudian mahasiswa akan menerima surat usulan dari jurusan atau program studi untuk dilampirkan beserta syarat ke admin di fakultas 5 Mengirimkan syarat beserta surat usulan yang diterima oleh mahasiswa di jurusan atau program studi ke admin di fakultas 7 Syarat beserta surat usulan yang telah dikeluarkan oleh jurusan atau program studi akan diperiksa kembali oleh admin di fakultas 8 Selanjutnya admin di fakultas akan mengeluarkan surat izin penelitian kerja praktik atau disingkat menjadi KP yang telah ditanda tangani Dekan dan Wakil Dekan serta memiliki nomor surat dan cap oleh fakultas 9 Surat izin penelitian kerja praktik atau KP dapat diambil di loket 2""" """Apa itu Surat Izin Kerja Praktik KP?"",""Surat Izin Kerja Praktik adalah dokumen resmi yang memberikan izin kepada mahasiswa untuk melakukan kerja praktik sebagai bagian dari jurusan studi mereka""" """Bagaimana cara mendapatkan Surat Izin Kerja Praktik?"",""Anda perlu melengkapi persyaratan seperti transkrip KRS bukti pembayaran UKT terakhir dan surat peminjaman alat Ajukan persyaratan ini kepada admin di jurusan atau program studi""" """Apakah Surat Izin Kerja Praktik diperlukan untuk setiap mahasiswa yang ingin melakukan kerja praktik?"",""Ya Surat Izin Kerja Praktik diperlukan sebagai syarat resmi untuk melaksanakan kerja praktik""" """Bagaimana langkah-langkah pengajuan Surat Izin Kerja Praktik?"",""Alur pengurusan Surat Izin Kerja Praktik adalah sebagai berikut 1 Mahasiswa melengkapi persyaratan Kerja Praktik atau disingkat menjadi KP 2 Persyaratan yang sudah dilengkapi dibawa kepada admin di jurusan atau program studi 4 Setelahnya admin memeriksa persyaratan yang dibawa oleh mahasiswa di jurusan atau program studi dan akan menerima usulan dari mahasiswa tentang Kerja Praktik atau disingkat menjadi KP 4 Kemudian mahasiswa akan menerima surat usulan dari jurusan atau program studi untuk dilampirkan beserta syarat ke admin di fakultas 5 Mengirimkan syarat beserta surat usulan yang diterima oleh mahasiswa di jurusan atau program studi ke admin di fakultas 6 Syarat beserta surat usulan yang telah dikeluarkan oleh jurusan atau program studi akan diperiksa kembali oleh admin di fakultas 7 Selanjutnya admin di fakultas akan mengeluarkan surat izin penelitian kerja praktik atau disingkat menjadi KP yang telah ditanda tangani Dekan dan Wakil Dekan serta memiliki nomor surat dan cap oleh fakultas 8 Surat izin penelitian kerja praktik atau KP dapat diambil di loket 2""" """Apakah mahasiswa perlu membuat surat usulan sendiri?"",""Ya mahasiswa perlu membuat surat usulan 
dan mengajukannya kepada admin di jurusan atau program studi bersama dengan persyaratan lainnya""" """Apakah surat usulan perlu disetujui oleh admin di jurusan sebelum diajukan ke fakultas?"",""Ya admin di jurusan akan memeriksa dan menyetujui surat usulan sebelum mahasiswa dapat melanjutkan ke langkah berikutnya"""
[]
[ "TAGS\n#region-us \n" ]
89d2474838b0c25415c0769488c2abe916d351af
The corpus consists of texts written by Romanian authors between the 19th century and the present, representing stories, short stories, fairy tales and sketches. The current version contains 19 authors, 1263 full texts and 12516 paragraphs of around 200 words each, preserving paragraph integrity.

Note: This is an extended version of the ROST corpus (https://www.kaggle.com/datasets/sandamariaavram/rost-romanian-stories-and-other-texts), which only contains 400 texts and 10 authors.

## Dataset Overview

| Author | FT (full texts) | PP (paragraphs) | M(SD) FT | M(SD) Unique Words | M(SD) TTR |
|----------------------|------|------|---------------------|----------------------|----------------------|
| Alexandru Vlahuta | 96 | 647 | 1629.16 (1341.48) | 735.19 (462.04) | 0.5110 (0.0844) |
| Anton Bacalbasa | 132 | 485 | 808.17 (720.04) | 392.20 (244.57) | 0.5256 (0.0660) |
| Barbu St. Delavrancea | 47 | 747 | 4015.40 (2224.96) | 1391.72 (658.60) | 0.3730 (0.0599) |
| Costache Negruzzi | 24 | 343 | 3482.62 (2253.38) | 1236.46 (694.14) | 0.4027 (0.0883) |
| Emil Garleanu | 55 | 353 | 1533.58 (1582.43) | 609.09 (449.03) | 0.4649 (0.0767) |
| Emilia Plugaru | 41 | 382 | 2176.71 (1705.21) | 792.00 (454.83) | 0.4091 (0.0702) |
| George Toparceanu | 46 | 331 | 1689.11 (1246.86) | 711.00 (412.92) | 0.4728 (0.0815) |
| Ioan Slavici | 89 | 1716 | 4692.76 (2156.69) | 1306.64 (485.87) | 0.3043 (0.0665) |
| Ion Creanga | 45 | 424 | 2291.13 (2328.91) | 720.96 (554.58) | 0.4420 (0.1537) |
| Ion Luca Caragiale | 60 | 585 | 2444.30 (1541.96) | 895.13 (466.55) | 0.3832 (0.0485) |
| Liviu Rebreanu | 59 | 619 | 2544.49 (1770.39) | 969.80 (518.88) | 0.4165 (0.0654) |
| Mihai Eminescu | 27 | 405 | 3642.78 (2167.54) | 1284.67 (674.06) | 0.3834 (0.0767) |
| Mihai Oltean | 32 | 68 | 409.62 (394.16) | 216.28 (174.42) | 0.5938 (0.1093) |
| Mihail Sebastian | 46 | 658 | 3478.37 (1826.51) | 1234.85 (472.30) | 0.3803 (0.0532) |
| Nicolae Filimon | 35 | 375 | 2606.57 (1701.70) | 998.20 (540.52) | 0.4173 (0.0781) |
| Nicolae Iorga | 306 | 2982 | 2437.67 (2215.16) | 970.28 (741.50) | 0.4834 (0.1054) |
| Panait Istrati | 20 | 499 | 6299.85 (1202.32) | 2177.75 (369.46) | 0.3494 (0.0240) |
| Petre Ispirescu | 40 | 630 | 3768.72 (1614.16) | 1126.40 (359.51) | 0.3183 (0.0517) |
| Traian Demetrescu | 63 | 267 | 976.13 (581.40) | 472.32 (234.24) | 0.5279 (0.0845) |
| **Aggregate** | **1263** | **12516** | | | |
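For reference, the per-text figures behind the "Unique Words" and "TTR" (type-token ratio) columns can be reproduced with a simple computation; the table reports their mean and standard deviation per author. The whitespace tokenization below is an assumption, since the card does not state how the original statistics were tokenized.

```python
# Reference computation for the per-text statistics summarized in the
# table above (the table reports their mean and standard deviation).
# Whitespace tokenization is an assumption; the corpus authors may have
# tokenized differently, so exact numbers can vary.
def text_stats(text: str) -> dict:
    tokens = text.lower().split()  # naive whitespace tokens
    unique = set(tokens)
    ttr = len(unique) / len(tokens) if tokens else 0.0
    return {"tokens": len(tokens), "unique_words": len(unique), "ttr": ttr}

print(text_stats("A fost odata ca niciodata a fost un imparat"))
# {'tokens': 9, 'unique_words': 7, 'ttr': 0.777...}
```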
readerbench/ro-stories
[ "size_categories:10K<n<100K", "language:ro", "license:apache-2.0", "dataset", "romanian", "stories", "region:us" ]
2024-02-05T16:09:29+00:00
{"language": ["ro"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "tags": ["dataset", "romanian", "stories"]}
2024-02-08T15:55:03+00:00
[]
[ "ro" ]
TAGS #size_categories-10K<n<100K #language-Romanian #license-apache-2.0 #dataset #romanian #stories #region-us
The corpus consists of texts written by Romanian authors between 19th century and present, representing stories, short-stories, fairy tales and sketches. The current version contains 19 authors, 1263 full texts and 12516 paragraphs of around 200 words each, preserving paragraphs integrity. Note: This is an extended version of ROST corpus (URL which only contains 400 texts and 10 authors. Dataset Overview ----------------
[]
[ "TAGS\n#size_categories-10K<n<100K #language-Romanian #license-apache-2.0 #dataset #romanian #stories #region-us \n" ]
8cc955aa4fe517a754a16ba5ef2a5c869337d2ba
# Dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!) This is the dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!), containing 450 images and their tags. The core tags of this character are `short_hair, brown_hair, red_eyes, hat, witch_hat, black_hair`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 450 | 430.69 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 450 | 323.90 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 844 | 566.88 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 450 | 430.50 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 844 | 722.60 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/megumin_konosuba', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, anime_coloring, solo, cape, parody, closed_mouth, bangs | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, holding_staff, red_dress, short_hair_with_long_locks, solo, black_cape, black_gloves, collarbone, fingerless_gloves, looking_at_viewer, bangs, upper_body, anime_coloring, black_choker, sidelocks | | 2 | 25 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, cape, red_dress, short_hair_with_long_locks, fingerless_gloves, solo, belt, black_gloves, holding_staff, choker, collar | | 3 | 38 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | bandaged_leg, belt, black_thighhighs, fingerless_gloves, red_dress, 1girl, single_thighhigh, cape, short_hair_with_long_locks, solo, black_gloves, mismatched_legwear, looking_at_viewer, holding_staff | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, bangs, bare_shoulders, black_choker, collarbone, fingerless_gloves, red_dress, short_hair_with_long_locks, sidelocks, solo, upper_body, black_gloves, off_shoulder, indoors, anime_coloring, chair, open_mouth, sweatdrop | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, anime_coloring, clenched_teeth, solo, bangs, black_choker, cape, collar, parody | | 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, anime_coloring, bare_shoulders, choker, collar, parody, short_hair_with_long_locks, solo, open_mouth | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | anime_coloring | solo | cape | parody | closed_mouth | bangs | holding_staff | red_dress | short_hair_with_long_locks | black_cape | black_gloves | collarbone | fingerless_gloves | looking_at_viewer | upper_body | black_choker | sidelocks | belt | choker | collar | bandaged_leg | black_thighhighs | single_thighhigh | mismatched_legwear | bare_shoulders | off_shoulder | indoors | chair | open_mouth | sweatdrop | clenched_teeth | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------------|:-------|:-------|:---------|:---------------|:--------|:----------------|:------------|:-----------------------------|:-------------|:---------------|:-------------|:--------------------|:--------------------|:-------------|:---------------|:------------|:-------|:---------|:---------|:---------------|:-------------------|:-------------------|:---------------------|:-----------------|:---------------|:----------|:--------|:-------------|:------------|:-----------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | | | | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | 2 | 25 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | X | | | | X | X | X | | X | | X | | | | | X | X | X | | | | | | | | | | | | | 3 | 38 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | | | | X | X | X | | X | | X | X | | | | X | | | X | X | X | X | | | | | | | | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | X | | | | X | | X | X | | X | X | X | | X | X | X | | | | | | | | X | X | X | X | X | X | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | X | X | X | | X | | | | | | | | | | X | | | | X | | | | | | | | | | | X | | 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | X | X | | X | | | | | X | | | | | | | | | | X | X | | | | | X | | | | X | | |
CyberHarem/megumin_konosuba
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-05T16:35:55+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-05T17:30:09+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!) ================================================================ This is the dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!), containing 450 images and their tags. The core tags of this character are 'short\_hair, brown\_hair, red\_eyes, hat, witch\_hat, black\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
ea4da5cc67e7729bea8054fd8063f7d76300d8ae
# Dataset of aqua/γ‚’γ‚―γ‚’ (Kono Subarashii Sekai ni Shukufuku wo!) This is the dataset of aqua/γ‚’γ‚―γ‚’ (Kono Subarashii Sekai ni Shukufuku wo!), containing 758 images and their tags. The core tags of this character are `blue_hair, long_hair, hair_ornament, hair_rings, blue_eyes, bow, green_bow`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:------------|:---------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 758 | 685.89 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aqua_konosuba/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 758 | 535.83 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aqua_konosuba/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1558 | 1021.39 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aqua_konosuba/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 758 | 685.61 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aqua_konosuba/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1558 | 1.22 GiB | [Download](https://huggingface.co/datasets/CyberHarem/aqua_konosuba/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/aqua_konosuba', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, anime_coloring, bangs, bare_shoulders, detached_sleeves, open_mouth, solo, upper_body, hair_between_eyes, looking_at_viewer, single_hair_ring, medium_breasts | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, breasts, detached_sleeves, solo, bare_shoulders, single_hair_ring, upper_body, anime_coloring, looking_at_viewer, smile, hair_between_eyes | | 2 | 16 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, solo, open_mouth, anime_coloring, detached_sleeves | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, detached_sleeves, single_hair_ring, solo, thighhighs, very_long_hair, blue_skirt | | 4 | 13 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, smile, solo, detached_sleeves, closed_eyes, bare_shoulders | | 5 | 10 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, anime_coloring, blush, open_mouth, solo, tears, parody, crying, detached_sleeves, closed_eyes, meme | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, anime_coloring, bare_shoulders, closed_mouth, detached_sleeves, solo, bangs, hair_between_eyes, looking_at_viewer, upper_body | | 7 | 5 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1girl, anime_coloring, closed_mouth, solo, closed_eyes, hair_between_eyes, smile | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, bare_shoulders, collarbone, solo, upper_body, anime_coloring, blush, smile, breasts | | 9 | 7 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | ![](samples/9/clu9-sample4.png) | 1girl, blue_footwear, blue_shirt, blue_skirt, detached_sleeves, solo, thigh_boots, thighhighs_under_boots, breasts, open_mouth, white_thighhighs, very_long_hair | | 10 | 7 | ![](samples/10/clu10-sample0.png) | ![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | 
![](samples/10/clu10-sample4.png) | 1girl, anime_coloring, bangs, closed_mouth, hair_between_eyes, solo, blurry, looking_at_viewer, portrait, smile | | 11 | 7 | ![](samples/11/clu11-sample0.png) | ![](samples/11/clu11-sample1.png) | ![](samples/11/clu11-sample2.png) | ![](samples/11/clu11-sample3.png) | ![](samples/11/clu11-sample4.png) | 1boy, 1girl, detached_sleeves, solo_focus, blue_skirt, single_hair_ring, anime_coloring, breasts, looking_at_viewer, open_mouth | | 12 | 6 | ![](samples/12/clu12-sample0.png) | ![](samples/12/clu12-sample1.png) | ![](samples/12/clu12-sample2.png) | ![](samples/12/clu12-sample3.png) | ![](samples/12/clu12-sample4.png) | 2girls, detached_sleeves, open_mouth, solo_focus, blue_skirt, anime_coloring | | 13 | 5 | ![](samples/13/clu13-sample0.png) | ![](samples/13/clu13-sample1.png) | ![](samples/13/clu13-sample2.png) | ![](samples/13/clu13-sample3.png) | ![](samples/13/clu13-sample4.png) | 2girls, blue_footwear, blue_skirt, breasts, detached_sleeves, thighhighs, brown_hair, open_mouth, thigh_boots, 1boy, bare_shoulders, single_hair_ring | | 14 | 7 | ![](samples/14/clu14-sample0.png) | ![](samples/14/clu14-sample1.png) | ![](samples/14/clu14-sample2.png) | ![](samples/14/clu14-sample3.png) | ![](samples/14/clu14-sample4.png) | 1girl, enmaided, maid_apron, maid_headdress, zettai_ryouiki, breasts, solo, frills, single_hair_ring, white_thighhighs | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | anime_coloring | bangs | bare_shoulders | detached_sleeves | open_mouth | solo | upper_body | hair_between_eyes | looking_at_viewer | single_hair_ring | medium_breasts | breasts | smile | thighhighs | very_long_hair | blue_skirt | closed_eyes | blush | tears | parody | crying | meme | closed_mouth | collarbone | blue_footwear | blue_shirt | thigh_boots | thighhighs_under_boots | white_thighhighs | blurry | portrait | 1boy | solo_focus | 2girls | brown_hair | enmaided | maid_apron | maid_headdress | zettai_ryouiki | frills | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:--------|:-----------------|:--------|:-----------------|:-------------------|:-------------|:-------|:-------------|:--------------------|:--------------------|:-------------------|:-----------------|:----------|:--------|:-------------|:-----------------|:-------------|:--------------|:--------|:--------|:---------|:---------|:-------|:---------------|:-------------|:----------------|:-------------|:--------------|:-------------------------|:-------------------|:---------|:-----------|:-------|:-------------|:---------|:-------------|:-----------|:-------------|:-----------------|:-----------------|:---------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | | X | X | | X | X | X | X | X | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 16 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | 
![](samples/2/clu2-sample4.png) | X | X | | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | | X | | X | | | | X | | | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 13 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | X | X | | X | | | | | | | X | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | 5 | 10 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | | | X | X | X | | | | | | | | | | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | X | X | X | X | | X | X | X | X | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | 7 | 5 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | X | | | | | X | | X | | | | | X | | | | X | | | | | | X | | | | | | | | | | | | | | | | | | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | X | | X | | | X | X | | | | | X | X | | | | | X | | | | | | X | | | | | | | | | | | | | | | | | | 9 | 7 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | ![](samples/9/clu9-sample4.png) | X | | | | X | X | X | | | | | | X | | | X | X | | | | | | | | | X | X | X | X | X | | | | | | | | | | | | | 10 | 7 | ![](samples/10/clu10-sample0.png) | ![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | ![](samples/10/clu10-sample4.png) | X | X | X | | | | X | | X | X | | | | X | | | | | | | | | | X | | | | | | | X | X | | | | | | | | | | | 11 | 7 | ![](samples/11/clu11-sample0.png) | ![](samples/11/clu11-sample1.png) | ![](samples/11/clu11-sample2.png) | ![](samples/11/clu11-sample3.png) | ![](samples/11/clu11-sample4.png) | X | X | | | X | X | | | | X | X | | X | | | | X | | | | | | | | | | | | | | | | X | X | | | | | | | | | 12 | 6 | ![](samples/12/clu12-sample0.png) | ![](samples/12/clu12-sample1.png) | ![](samples/12/clu12-sample2.png) | ![](samples/12/clu12-sample3.png) | ![](samples/12/clu12-sample4.png) | | X | | | X | X | | | | | | | | | | | X | | | | | | | | | | | | | | | | | X | X | | | | | | | | 13 | 5 | ![](samples/13/clu13-sample0.png) | ![](samples/13/clu13-sample1.png) | ![](samples/13/clu13-sample2.png) | ![](samples/13/clu13-sample3.png) | ![](samples/13/clu13-sample4.png) | | | | X | X | X | | | | | X | | X | | X | | X | | | | | | | | | X | | X | | | | | X | | X | X | | | | | | | 14 | 7 | ![](samples/14/clu14-sample0.png) | ![](samples/14/clu14-sample1.png) | ![](samples/14/clu14-sample2.png) | ![](samples/14/clu14-sample3.png) | ![](samples/14/clu14-sample4.png) | X | | | | | | X | | | | X | | X | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | X | X |
CyberHarem/aqua_konosuba
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-05T16:36:55+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-05T18:10:24+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of aqua/γ‚’γ‚―γ‚’ (Kono Subarashii Sekai ni Shukufuku wo!) ============================================================ This is the dataset of aqua/γ‚’γ‚―γ‚’ (Kono Subarashii Sekai ni Shukufuku wo!), containing 758 images and their tags. The core tags of this character are 'blue\_hair, long\_hair, hair\_ornament, hair\_rings, blue\_eyes, bow, green\_bow', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
5b7c7fca6f40c890ab38b0effbcd5621b73d272b
# Dataset of lalatina_dustiness_ford/ダクネス (Kono Subarashii Sekai ni Shukufuku wo!) This is the dataset of lalatina_dustiness_ford/ダクネス (Kono Subarashii Sekai ni Shukufuku wo!), containing 451 images and their tags. The core tags of this character are `blonde_hair, long_hair, hair_ornament, ponytail, x_hair_ornament, blue_eyes, breasts`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:----------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 451 | 408.05 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lalatina_dustiness_ford_konosuba/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 451 | 321.01 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lalatina_dustiness_ford_konosuba/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 942 | 616.34 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lalatina_dustiness_ford_konosuba/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 451 | 407.89 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lalatina_dustiness_ford_konosuba/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 942 | 756.29 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lalatina_dustiness_ford_konosuba/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/lalatina_dustiness_ford_konosuba', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:------------------------------------------------------------------------------------------------------------| | 0 | 19 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 2girls, parody, armor, anime_coloring, gloves, blue_hair | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 2girls, armor, blush, open_mouth, smile, gloves, 1boy, parody | | 2 | 8 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, armor, gloves, parody, solo, open_mouth | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, anime_coloring, armor, parody, solo | | 4 | 6 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, armor, open_mouth, solo, sword, parody, anime_coloring | | 5 | 11 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, armor, sword, solo, style_parody, black_gloves | | 6 | 10 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, armor, solo, holding_sword, gloves, parody, anime_coloring | | 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1girl, blush, completely_nude, large_breasts, solo, window, barefoot, collarbone, covering_breasts, indoors | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 2girls | parody | armor | anime_coloring | gloves | blue_hair | blush | open_mouth | smile | 1boy | 1girl | solo | sword | style_parody | black_gloves | holding_sword | completely_nude | large_breasts | window | barefoot | collarbone | covering_breasts | indoors | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------|:---------|:--------|:-----------------|:---------|:------------|:--------|:-------------|:--------|:-------|:--------|:-------|:--------|:---------------|:---------------|:----------------|:------------------|:----------------|:---------|:-----------|:-------------|:-------------------|:----------| | 0 | 19 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | 
![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | | X | | X | X | X | X | | | | | | | | | | | | | | | 2 | 8 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | | X | X | | X | | | X | | | X | X | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | | X | X | X | | | | | | | X | X | | | | | | | | | | | | | 4 | 6 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | | X | X | X | | | | X | | | X | X | X | | | | | | | | | | | | 5 | 11 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | | | X | | | | | | | | X | X | X | X | X | | | | | | | | | | 6 | 10 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | | X | X | X | X | | | | | | X | X | | | | X | | | | | | | | | 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | | | | | | | X | | | | X | X | | | | | X | X | X | X | X | X | X |
CyberHarem/lalatina_dustiness_ford_konosuba
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-05T16:39:04+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-05T17:33:58+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of lalatina\_dustiness\_ford/ダクネス (Kono Subarashii Sekai ni Shukufuku wo!) ================================================================================== This is the dataset of lalatina\_dustiness\_ford/ダクネス (Kono Subarashii Sekai ni Shukufuku wo!), containing 451 images and their tags. The core tags of this character are 'blonde\_hair, long\_hair, hair\_ornament, ponytail, x\_hair\_ornament, blue\_eyes, breasts', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
2232509ec8aa23320a836e38ef9ade53a59f7ba0
--- license: apache-2.0 ---
luckeciano/reddit-features-hermes
[ "region:us" ]
2024-02-05T17:16:01+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "features_train_hermes_ft.csv"}, {"split": "test", "path": "features_test_hermes_ft.csv"}, {"split": "eval", "path": "features_eval_hermes_ft.csv"}, {"split": "ood", "path": "features_ood_hermes_ft.csv"}, {"split": "shuffled", "path": "features_shuffled_hermes_ft.csv"}]}]}
2024-02-13T16:13:34+00:00
[]
[]
TAGS #region-us
--- license: apache-2.0 ---
[]
[ "TAGS\n#region-us \n" ]
2d17443a8c1f91eb8df39bb88df72af5f47c380c
## Usage

For testing purposes, you can use the hosted dummy dataset (`dummy_data`) as follows:

```
import datasets

ds = datasets.load_dataset("ydshieh/coco_dataset_script", "2017", data_dir="./dummy_data/")
```

For using the COCO dataset (2017), you need to download it manually first:

```
wget http://images.cocodataset.org/zips/train2017.zip
wget http://images.cocodataset.org/zips/val2017.zip
wget http://images.cocodataset.org/zips/test2017.zip
wget http://images.cocodataset.org/annotations/annotations_trainval2017.zip
wget http://images.cocodataset.org/annotations/image_info_test2017.zip
```

Then to load the dataset:

```
COCO_DIR = ...(path to the downloaded dataset directory)...
ds = datasets.load_dataset("ydshieh/coco_dataset_script", "2017", data_dir=COCO_DIR)
```
murjun/coco_2017
[ "region:us" ]
2024-02-05T17:26:48+00:00
{}
2024-02-05T17:54:11+00:00
[]
[]
TAGS #region-us
## Usage For testing purpose, you can use the hosted dummy dataset ('dummy_data') as follows: For using the COCO dataset (2017), you need to download it manually first: Then to load the dataset:
[ "## Usage\n\nFor testing purpose, you can use the hosted dummy dataset ('dummy_data') as follows:\n\n\n\nFor using the COCO dataset (2017), you need to download it manually first:\n\n\nThen to load the dataset:" ]
[ "TAGS\n#region-us \n", "## Usage\n\nFor testing purpose, you can use the hosted dummy dataset ('dummy_data') as follows:\n\n\n\nFor using the COCO dataset (2017), you need to download it manually first:\n\n\nThen to load the dataset:" ]
6e1d1d7492fe3dc1bb4a3f15e9f85919d1abac47
---
# Dataset Card for Car Crash Dataset

## Dataset Details

- **NetID:** zm83
- **Repository:** Access the Car Crash Dataset on [Kaggle](https://www.kaggle.com/)
- **License:** Apache 2.0
- **Expected Update Frequency:** Never (Last updated 22 days ago)

## Dataset Description

The Car Crash Dataset provides a comprehensive collection of detailed records on traffic accidents from 2003 to 2015 in Monroe County. It offers insights into various factors influencing road accidents, including collision severity, weather conditions, road types, and other contributing factors. This dataset is crucial for analyses aimed at improving road safety and implementing preventive measures.

### Collaborators

- Jackson Divakar R (Owner)

### Provenance

Data for the Car Crash Dataset come from:
- Official traffic incident reports
- Law enforcement records
- Insurance claims

These sources ensure a comprehensive and accurate representation of the factors contributing to road accidents.

### Collection Methodology

A consistent methodology was applied during the compilation of data from various sources to ensure the accuracy and reliability of the dataset.

## Dataset Summary

The dataset is a meticulous aggregation of data points that delve into the factors influencing road accidents, documenting various aspects of car crashes. It is designed to facilitate the development of predictive models, safety analytics, and enhanced traffic management systems.

## Dataset Uses

The Car Crash Dataset can be used for a variety of applications, including traffic safety analysis, urban planning, machine learning, policy development, public health, and insurance analysis. It supports in-depth research into the causes and consequences of road traffic accidents.

### Possible Topics for Analysis

The dataset supports a range of analysis topics, such as:
- The effect of weather conditions on road accidents
- Correlations between traffic volume and accident frequency
- The effectiveness of road safety laws and regulations
- Geographic analysis of accident hotspots
- Human factors versus environmental factors in accidents
- Vehicle type and collision severity

### Curation Motivation

The dataset was curated to provide a resource for stakeholders to understand and mitigate the factors behind road accidents, aiming to reduce their frequency and severity, and to support the development of road safety strategies. Researchers and analysts are invited to use this dataset to explore various topics and generate actionable insights for community safety and well-being.

### Data Instances

A typical entry in the dataset might look like the following (example in JSON format):

```json
{
  "accident_id": "XYZ123",
  "timestamp": "2015-08-21T14:30:00Z",
  "location": {
    "latitude": 43.1566,
    "longitude": -77.6088
  },
  "severity": "Moderate",
  "weather_condition": "Clear",
  "road_type": "Highway",
  "vehicles_involved": 2,
  "contributing_factors": ["Speeding", "Distracted Driving"]
}
```

Additional fields in the dataset may include but are not limited to:

```json
{
  "injury_types": ["None", "Minor", "Severe"],
  "involved_parties": {
    "drivers": [
      {
        "age": 35,
        "gender": "Female",
        "driving_experience": "10 years"
      },
      {
        "age": 22,
        "gender": "Male",
        "driving_experience": "2 years"
      }
    ],
    "pedestrians": []
  },
  "collision_type": "Rear-end",
  "law_enforcement_response": {
    "response_time": "5 minutes",
    "actions_taken": ["Traffic control", "Medical assistance"]
  }
}
```
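To make the record structure above concrete, here is a small parsing sketch. The field names come directly from the example instance; how the full dataset is actually shipped (JSON lines, CSV, ...) is not stated in this card, so that part is left open.

```python
# Parsing sketch for the example record shown above. Field names are
# taken from the card's sample instance; the on-disk format of the full
# dataset is not specified, so this only demonstrates per-record access.
import json
from collections import Counter

raw_example = '''{
  "accident_id": "XYZ123",
  "timestamp": "2015-08-21T14:30:00Z",
  "location": {"latitude": 43.1566, "longitude": -77.6088},
  "severity": "Moderate",
  "weather_condition": "Clear",
  "road_type": "Highway",
  "vehicles_involved": 2,
  "contributing_factors": ["Speeding", "Distracted Driving"]
}'''

record = json.loads(raw_example)

# One analysis hook from the "Possible Topics" list: tally contributing
# factors per weather condition (trivial here with a single record).
factor_counts = Counter(
    (record["weather_condition"], factor)
    for factor in record["contributing_factors"]
)
print(factor_counts)
```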
Ziyuan111/traffic_accident_data
[ "task_categories:table-question-answering", "size_categories:1K<n<10K", "language:en", "license:apache-2.0", "region:us" ]
2024-02-05T17:28:32+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["1K<n<10K"], "task_categories": ["table-question-answering"]}
2024-02-06T21:45:09+00:00
[]
[ "en" ]
TAGS #task_categories-table-question-answering #size_categories-1K<n<10K #language-English #license-apache-2.0 #region-us
--- # Dataset Card for Car Crash Dataset ## Dataset Details - NetID: zm83 - Repository: Access the Car Crash Dataset on Kaggle - License: Apache 2.0 - Expected Update Frequency: Never (Last updated 22 days ago) ## Dataset Description The Car Crash Dataset provides a comprehensive collection of detailed records on traffic accidents from 2003 to 2015 in Monroe County. It offers insights into various factors influencing road accidents, including collision severity, weather conditions, road types, and other contributing factors. This dataset is crucial for analyses aimed at improving road safety and implementing preventive measures. ### Collaborators - Jackson Divakar R (Owner) ### Provenance Data for the Car Crash Dataset come from: - Official traffic incident reports - Law enforcement records - Insurance claims These sources ensure a comprehensive and accurate representation of the factors contributing to road accidents. ### Collection Methodology A consistent methodology was applied during the compilation of data from various sources to ensure the accuracy and reliability of the dataset. ## Dataset Summary The dataset is a meticulous aggregation of data points that delve into the factors influencing road accidents, documenting various aspects of car crashes. It is designed to facilitate the development of predictive models, safety analytics, and enhanced traffic management systems. ## Dataset Uses The Car Crash Dataset can be used for a variety of applications, including traffic safety analysis, urban planning, machine learning, policy development, public health, and insurance analysis. It supports in-depth research into the causes and consequences of road traffic accidents. ### Possible Topics for Analysis The dataset supports a range of analysis topics, such as: - The effect of weather conditions on road accidents - Correlations between traffic volume and accident frequency - The effectiveness of road safety laws and regulations - Geographic analysis of accident hotspots - Human factors versus environmental factors in accidents - Vehicle type and collision severity ### Curation Motivation The dataset was curated to provide a resource for stakeholders to understand and mitigate the factors behind road accidents, aiming to reduce their frequency and severity, and to support the development of road safety strategies. Researchers and analysts are invited to use this dataset to explore various topics and generate actionable insights for community safety and well-being. ### Data Instances A typical entry in the dataset might look like the following (example in JSON format): Additional fields in the dataset may include but are not limited to:
[ "# Dataset Card for Car Crash Dataset", "## Dataset Details\n\n- NetID: zm83\n- Repository: Access the Car Crash Dataset on Kaggle\n- License: Apache 2.0\n- Expected Update Frequency: Never (Last updated 22 days ago)", "## Dataset Description\n\nThe Car Crash Dataset provides a comprehensive collection of detailed records on traffic accidents from 2003 to 2015 in Monroe County. It offers insights into various factors influencing road accidents, including collision severity, weather conditions, road types, and other contributing factors. This dataset is crucial for analyses aimed at improving road safety and implementing preventive measures.", "### Collaborators\n\n- Jackson Divakar R (Owner)", "### Provenance\n\nData for the Car Crash Dataset come from:\n- Official traffic incident reports\n- Law enforcement records\n- Insurance claims\n\nThese sources ensure a comprehensive and accurate representation of the factors contributing to road accidents.", "### Collection Methodology\n\nA consistent methodology was applied during the compilation of data from various sources to ensure the accuracy and reliability of the dataset.", "## Dataset Summary\n\nThe dataset is a meticulous aggregation of data points that delve into the factors influencing road accidents, documenting various aspects of car crashes. It is designed to facilitate the development of predictive models, safety analytics, and enhanced traffic management systems.", "## Dataset Uses\n\nThe Car Crash Dataset can be used for a variety of applications, including traffic safety analysis, urban planning, machine learning, policy development, public health, and insurance analysis. It supports in-depth research into the causes and consequences of road traffic accidents.", "### Possible Topics for Analysis\n\nThe dataset supports a range of analysis topics, such as:\n- The effect of weather conditions on road accidents\n- Correlations between traffic volume and accident frequency\n- The effectiveness of road safety laws and regulations\n- Geographic analysis of accident hotspots\n- Human factors versus environmental factors in accidents\n- Vehicle type and collision severity", "### Curation Motivation\n\nThe dataset was curated to provide a resource for stakeholders to understand and mitigate the factors behind road accidents, aiming to reduce their frequency and severity, and to support the development of road safety strategies. Researchers and analysts are invited to use this dataset to explore various topics and generate actionable insights for community safety and well-being.", "### Data Instances\n\nA typical entry in the dataset might look like the following (example in JSON format):\n\n\n\nAdditional fields in the dataset may include but are not limited to:" ]
[ "TAGS\n#task_categories-table-question-answering #size_categories-1K<n<10K #language-English #license-apache-2.0 #region-us \n", "# Dataset Card for Car Crash Dataset", "## Dataset Details\n\n- NetID: zm83\n- Repository: Access the Car Crash Dataset on Kaggle\n- License: Apache 2.0\n- Expected Update Frequency: Never (Last updated 22 days ago)", "## Dataset Description\n\nThe Car Crash Dataset provides a comprehensive collection of detailed records on traffic accidents from 2003 to 2015 in Monroe County. It offers insights into various factors influencing road accidents, including collision severity, weather conditions, road types, and other contributing factors. This dataset is crucial for analyses aimed at improving road safety and implementing preventive measures.", "### Collaborators\n\n- Jackson Divakar R (Owner)", "### Provenance\n\nData for the Car Crash Dataset come from:\n- Official traffic incident reports\n- Law enforcement records\n- Insurance claims\n\nThese sources ensure a comprehensive and accurate representation of the factors contributing to road accidents.", "### Collection Methodology\n\nA consistent methodology was applied during the compilation of data from various sources to ensure the accuracy and reliability of the dataset.", "## Dataset Summary\n\nThe dataset is a meticulous aggregation of data points that delve into the factors influencing road accidents, documenting various aspects of car crashes. It is designed to facilitate the development of predictive models, safety analytics, and enhanced traffic management systems.", "## Dataset Uses\n\nThe Car Crash Dataset can be used for a variety of applications, including traffic safety analysis, urban planning, machine learning, policy development, public health, and insurance analysis. It supports in-depth research into the causes and consequences of road traffic accidents.", "### Possible Topics for Analysis\n\nThe dataset supports a range of analysis topics, such as:\n- The effect of weather conditions on road accidents\n- Correlations between traffic volume and accident frequency\n- The effectiveness of road safety laws and regulations\n- Geographic analysis of accident hotspots\n- Human factors versus environmental factors in accidents\n- Vehicle type and collision severity", "### Curation Motivation\n\nThe dataset was curated to provide a resource for stakeholders to understand and mitigate the factors behind road accidents, aiming to reduce their frequency and severity, and to support the development of road safety strategies. Researchers and analysts are invited to use this dataset to explore various topics and generate actionable insights for community safety and well-being.", "### Data Instances\n\nA typical entry in the dataset might look like the following (example in JSON format):\n\n\n\nAdditional fields in the dataset may include but are not limited to:" ]
e8b65bd3cc31caddea8bb1e4d594dfc5983b37b3
A dataset of visual questions for data science.
eduvedras/Data_Science_Questions
[ "region:us" ]
2024-02-05T17:56:11+00:00
{}
2024-02-06T11:33:55+00:00
[]
[]
TAGS #region-us
A dataset of visual questions for data science.
[]
[ "TAGS\n#region-us \n" ]
6278e5bc8ea3a2f216faa03a3f12ec5b6645b7c6
This is a ChatML-formatted version of the original [SlimOrca-Dedup dataset](https://huggingface.co/datasets/Open-Orca/SlimOrca-Dedup) with a few modifications to the system prompts.
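Since the card only states that the data is ChatML formatted, the sketch below shows the generic ChatML markup as an illustration of what the formatted text typically looks like. The delimiter tokens follow the common ChatML convention; the exact template and system prompts used for this dataset are not specified here, so treat the details as assumptions.

```python
# Illustration only: the generic ChatML markup, to make "ChatML formatted"
# concrete. The actual template and system prompts used by this dataset are
# not specified here, so treat the details below as assumptions.
def to_chatml(system: str, user: str, assistant: str) -> str:
    """Render one exchange with the conventional ChatML delimiters."""
    return (
        f"<|im_start|>system\n{system}<|im_end|>\n"
        f"<|im_start|>user\n{user}<|im_end|>\n"
        f"<|im_start|>assistant\n{assistant}<|im_end|>\n"
    )

if __name__ == "__main__":
    print(to_chatml(
        "You are a helpful assistant.",
        "Summarize the water cycle in one sentence.",
        "Water evaporates, condenses into clouds, and falls back as precipitation.",
    ))
```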
Radiantloom/slimorca-dedup-chatml
[ "license:apache-2.0", "code", "art", "music", "legal", "finance", "biology", "chemistry", "SlimOrca", "ChatML", "region:us" ]
2024-02-05T18:22:18+00:00
{"license": "apache-2.0", "dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1241074844, "num_examples": 363491}], "download_size": 642543249, "dataset_size": 1241074844}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["code", "art", "music", "legal", "finance", "biology", "chemistry", "SlimOrca", "ChatML"]}
2024-02-17T14:58:56+00:00
[]
[]
TAGS #license-apache-2.0 #code #art #music #legal #finance #biology #chemistry #SlimOrca #ChatML #region-us
This is a ChatML-formatted version of the original SlimOrca-Dedup dataset with a few modifications to the system prompts.
[]
[ "TAGS\n#license-apache-2.0 #code #art #music #legal #finance #biology #chemistry #SlimOrca #ChatML #region-us \n" ]
795476c72079a8a4a36daa55ac8fab0e6a548c2d
# Hungarian llm testing This is a really simple dataset to test fine-tuning a language model on Hungarian text.
Bazsalanszky/hungarian-llm-testing
[ "language:hu", "license:mit", "region:us" ]
2024-02-05T19:00:39+00:00
{"language": ["hu"], "license": "mit", "dataset_info": {"language": ["hu"]}}
2024-02-08T08:18:09+00:00
[]
[ "hu" ]
TAGS #language-Hungarian #license-mit #region-us
# Hungarian llm testing This is a really simple dataset to test fine-tuning a language model on Hungarian text.
[ "# Hungarian llm testing\n\nThis is a really simple data-set to test fine-tuning a language model on Hungarian text." ]
[ "TAGS\n#language-Hungarian #license-mit #region-us \n", "# Hungarian llm testing\n\nThis is a really simple data-set to test fine-tuning a language model on Hungarian text." ]
9f685679eb8ef00df86a44ba597eeafd46cb0935
MRQA Loaded without SearchQA! -> Size: 450309 Remove duplicates by string match -> Before: 450309 | After: 401207 Before context preprocess: 401207 After context preprocess: 381972 Before split: 381972 After split: 378213 After context length filtering: 233328 After answer length filtering: 222697 Remove duplicates by similarity -> Before: 222697 | After: 208017
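The final step above, removing near-duplicates by embedding similarity (at the 0.9 threshold indicated by the dataset name), can be illustrated with a short, hedged sketch. The snippet below runs a greedy cosine-similarity filter over precomputed query embeddings; the embedding model and the exact comparison strategy used by the original pipeline are not documented here, so this is only an approximation of the idea.

```python
# Hedged sketch, not the authors' actual script: greedy near-duplicate removal
# over precomputed query embeddings with a cosine-similarity threshold,
# mirroring the "remove duplicates by similarity" step reported above. The
# embedding model and exact comparison strategy are assumptions.
import numpy as np

def dedup_by_similarity(embeddings: np.ndarray, threshold: float = 0.9) -> list:
    """Return indices of rows kept after greedy cosine-similarity dedup."""
    normed = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
    kept = []
    for i, vec in enumerate(normed):
        if kept and np.max(normed[kept] @ vec) >= threshold:
            continue  # too similar to an already kept query, so drop it
        kept.append(i)
    return kept

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    fake = rng.normal(size=(1000, 64)).astype(np.float32)
    print(len(dedup_by_similarity(fake, threshold=0.9)), "of 1000 kept")
```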
Atipico1/mrqa_preprocessed_thres-0.9_by-st
[ "region:us" ]
2024-02-05T19:38:01+00:00
{"dataset_info": {"features": [{"name": "subset", "dtype": "string"}, {"name": "qid", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "masked_query", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "answer_sent", "dtype": "string"}, {"name": "answer_in_context", "sequence": "string"}, {"name": "query_embedding", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 838499490.2996268, "num_examples": 208017}], "download_size": 873260263, "dataset_size": 838499490.2996268}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T05:24:52+00:00
[]
[]
TAGS #region-us
MRQA Loaded without SearchQA! -> Size: 450309 Remove duplicates by string match -> Before: 450309 | After: 401207 Before context preprocess: 401207 After context preprocess: 381972 Before split: 381972 After split: 378213 After context length filtering: 233328 After answer length filtering: 222697 Remove duplicates by similarity -> Before: 222697 | After: 208017
[]
[ "TAGS\n#region-us \n" ]
f2566cdef3b6720f9da0adf034aefcd710f871d3
# WebLINX: Real-World Website Navigation with Multi-Turn Dialogue WARNING: This is not the main WebLINX data card! You might want to use the main WebLINX data card instead: > **[WebLINX: Real-World Website Navigation with Multi-Turn Dialogue](https://huggingface.co/datasets/mcgill-nlp/weblinx)**
McGill-NLP/WebLINX-full
[ "size_categories:10K<n<100K", "language:en", "conversational", "image-to-text", "vision", "convAI", "region:us" ]
2024-02-05T20:12:12+00:00
{"language": ["en"], "size_categories": ["10K<n<100K"], "config_names": ["chat"], "configs": [{"config_name": "chat", "default": true, "data_files": [{"split": "train", "path": "chat/train.csv"}, {"split": "validation", "path": "chat/valid.csv"}, {"split": "test", "path": "chat/test_iid.csv"}, {"split": "test_geo", "path": "chat/test_geo.csv"}, {"split": "test_vis", "path": "chat/test_vis.csv"}, {"split": "test_cat", "path": "chat/test_cat.csv"}, {"split": "test_web", "path": "chat/test_web.csv"}]}], "tags": ["conversational", "image-to-text", "vision", "convAI"]}
2024-02-14T01:31:35+00:00
[]
[ "en" ]
TAGS #size_categories-10K<n<100K #language-English #conversational #image-to-text #vision #convAI #region-us
# WebLINX: Real-World Website Navigation with Multi-Turn Dialogue WARNING: This is not the main WebLINX data card! You might want to use the main WebLINX data card instead: > WebLINX: Real-World Website Navigation with Multi-Turn Dialogue
[ "# WebLINX: Real-World Website Navigation with Multi-Turn Dialogue\n\nWARNING: This is not the main WebLINX data card! You might want to use the main WebLINX data card instead: \n\n> WebLINX: Real-World Website Navigation with Multi-Turn Dialogue" ]
[ "TAGS\n#size_categories-10K<n<100K #language-English #conversational #image-to-text #vision #convAI #region-us \n", "# WebLINX: Real-World Website Navigation with Multi-Turn Dialogue\n\nWARNING: This is not the main WebLINX data card! You might want to use the main WebLINX data card instead: \n\n> WebLINX: Real-World Website Navigation with Multi-Turn Dialogue" ]
8b0446a23aa74cb8185c86cd9dcbe739bdf2acaa
# Dataset Card for "VNTL-v3-1k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
lmg-anon/VNTL-v3-1k
[ "region:us" ]
2024-02-05T20:45:11+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "val", "path": "data/val-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "ignore_loss", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 26306600, "num_examples": 10939}, {"name": "val", "num_bytes": 3872937, "num_examples": 1639}], "download_size": 13652180, "dataset_size": 30179537}}
2024-02-07T05:35:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "VNTL-v3-1k" More Information needed
[ "# Dataset Card for \"VNTL-v3-1k\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"VNTL-v3-1k\"\n\nMore Information needed" ]
d43a0130bc74bc869c95cb553988695923b95b8d
- 32.579 texts in total, 14.012 NOT hateful texts and 18.567 HATEFUL texts - All duplicate values were removed - Split using sklearn into 80% train and 20% temporary test (stratified by label). The temporary test set was then split 50/50 into test and validation sets (stratified by label) - Split: 80/10/10 - Train set label distribution: 0 ==> 11.210, 1 ==> 14.853, 26.063 in total - Validation set label distribution: 0 ==> 1.401, 1 ==> 1.857, 3.258 in total - Test set label distribution: 0 ==> 1.401, 1 ==> 1.857, 3.258 in total - Combination of 4 publicly available datasets: - 1. "Ethos" dataset (Mollas et al., 2022) - 2. HateCheck: Functional Tests for Hate Speech Detection Models (Rottger et al., 2021) - 3. A Benchmark Dataset for Learning to Intervene in Online Hate Speech (Qian et al., 2019) - 4. Automated Hate Speech Detection and the Problem of Offensive Language (Davidson et al., 2017)
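The two-stage split described above can be sketched with scikit-learn as follows. The column name `label` and the random seed are assumptions, since neither is reported; only the proportions and the stratification mirror the description.

```python
# Sketch of the split described above. The column name "label" and the random
# seed are assumptions (neither is reported); only the proportions and the
# stratification mirror the card: 80% train, then the remaining 20% halved
# into validation and test, stratified by label at both stages.
import pandas as pd
from sklearn.model_selection import train_test_split

def split_80_10_10(df: pd.DataFrame, seed: int = 42):
    train_df, temp_df = train_test_split(
        df, test_size=0.20, stratify=df["label"], random_state=seed
    )
    val_df, test_df = train_test_split(
        temp_df, test_size=0.50, stratify=temp_df["label"], random_state=seed
    )
    return train_df, val_df, test_df
```

Running this on the full table yields an 80/10/10 split with the label ratio preserved in every part.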
christinacdl/hate_speech_dataset
[ "task_categories:text-classification", "language:en", "license:apache-2.0", "region:us" ]
2024-02-05T21:03:54+00:00
{"language": ["en"], "license": "apache-2.0", "task_categories": ["text-classification"]}
2024-02-08T17:41:03+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #language-English #license-apache-2.0 #region-us
- 32.579 texts in total, 14.012 NOT hateful texts and 18.567 HATEFUL texts - All duplicate values were removed - Split using sklearn into 80% train and 20% temporary test (stratified by label). The temporary test set was then split 50/50 into test and validation sets (stratified by label) - Split: 80/10/10 - Train set label distribution: 0 ==> 11.210, 1 ==> 14.853, 26.063 in total - Validation set label distribution: 0 ==> 1.401, 1 ==> 1.857, 3.258 in total - Test set label distribution: 0 ==> 1.401, 1 ==> 1.857, 3.258 in total - Combination of 4 publicly available datasets: - 1. "Ethos" dataset (Mollas et al., 2022) - 2. HateCheck: Functional Tests for Hate Speech Detection Models (Rottger et al., 2021) - 3. A Benchmark Dataset for Learning to Intervene in Online Hate Speech (Qian et al., 2019) - 4. Automated Hate Speech Detection and the Problem of Offensive Language (Davidson et al., 2017)
[]
[ "TAGS\n#task_categories-text-classification #language-English #license-apache-2.0 #region-us \n" ]
f113ca599d6d9ae0848950c2730ab2f52b80d746
MRQA Loaded without SearchQA! -> Size: 450309 Remove duplicates by string match -> Before: 450309 | After: 401207 Before context preprocess: 401207 After context preprocess: 381972 Before split: 381972 After split: 378213 After context length filtering: 233328 After answer length filtering: 222697 Remove duplicates by similarity -> Before: 222697 | After: 204348
Atipico1/mrqa_preprocessed_thres-0.95_by-dpr
[ "region:us" ]
2024-02-05T21:29:41+00:00
{"dataset_info": {"features": [{"name": "subset", "dtype": "string"}, {"name": "qid", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "masked_query", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "answer_sent", "dtype": "string"}, {"name": "answer_in_context", "sequence": "string"}, {"name": "query_embedding", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 823710051.792633, "num_examples": 204348}], "download_size": 858780623, "dataset_size": 823710051.792633}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T05:25:33+00:00
[]
[]
TAGS #region-us
MRQA Loaded without SearchQA! -> Size: 450309 Remove duplicates by string match -> Before: 450309 | After: 401207 Before context preprocess: 401207 After context preprocess: 381972 Before split: 381972 After split: 378213 After context length filtering: 233328 After answer length filtering: 222697 Remove duplicates by similarity -> Before: 222697 | After: 204348
[]
[ "TAGS\n#region-us \n" ]
63e73cbc9edc7d0a2689ad64dc4caeeb8b0b4bfb
# Dataset Card for "formal-logic-simple-order-new-objects-paired-bigger-5000" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
pccl-org/formal-logic-simple-order-new-objects-paired-bigger-5000
[ "region:us" ]
2024-02-05T21:34:36+00:00
{"dataset_info": {"features": [{"name": "greater_than", "dtype": "string"}, {"name": "less_than", "dtype": "string"}, {"name": "paired_example", "sequence": {"sequence": "string"}}, {"name": "correct_example", "sequence": "string"}, {"name": "incorrect_example", "sequence": "string"}, {"name": "distance", "dtype": "int64"}, {"name": "index", "dtype": "int64"}, {"name": "index_in_distance", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 3164761142, "num_examples": 12487503}], "download_size": 1119912046, "dataset_size": 3164761142}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-05T21:35:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for "formal-logic-simple-order-new-objects-paired-bigger-5000" More Information needed
[ "# Dataset Card for \"formal-logic-simple-order-new-objects-paired-bigger-5000\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"formal-logic-simple-order-new-objects-paired-bigger-5000\"\n\nMore Information needed" ]
ce58dd895f01c0f341d357c58c83fb6c3f04f2c0
# Dataset Card for "EGY_BCD" This is an unofficial repo for the change detection dataset EGY-BCD. The dataset was randomly (seed=8888) split into subsets of train/val/test with a ratio of 6:2:2. [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ericyu/EGY_BCD
[ "region:us" ]
2024-02-05T21:47:15+00:00
{"dataset_info": {"features": [{"name": "imageA", "dtype": "image"}, {"name": "imageB", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 685483069.3837136, "num_examples": 3654}, {"name": "test", "num_bytes": 226848178.30523786, "num_examples": 1218}, {"name": "val", "num_bytes": 228364798.69204846, "num_examples": 1219}], "download_size": 1135172308, "dataset_size": 1140696046.381}}
2024-02-05T21:54:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for "EGY_BCD" This is an unofficial repo for the change detection dataset EGY-BCD. The dataset was randomly (seed=8888) split into subsets of train/val/test with a ratio of 6:2:2. More Information needed
[ "# Dataset Card for \"EGY_BCD\"\n\nThis is an unofficial repo for the change detection dataset EGY-BCD. The dataset was randomly (seed=8888) split into subsets of train/val/test with a ratio of 6:2:2.\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"EGY_BCD\"\n\nThis is an unofficial repo for the change detection dataset EGY-BCD. The dataset was randomly (seed=8888) split into subsets of train/val/test with a ratio of 6:2:2.\n\nMore Information needed" ]
4de48c27cc912d724297a4832c9de36e3cad28b3
# Dataset Card for "CNAMCD_Cropped" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ericyu/CNAMCD_Cropped
[ "region:us" ]
2024-02-05T22:02:31+00:00
{"dataset_info": {"features": [{"name": "imageA", "dtype": "image"}, {"name": "imageB", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 144417263.7991308, "num_examples": 6019}, {"name": "test", "num_bytes": 48131017.86226156, "num_examples": 2006}, {"name": "val", "num_bytes": 48155065.57960765, "num_examples": 2007}], "download_size": 239525611, "dataset_size": 240703347.241}}
2024-02-05T22:03:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "CNAMCD_Cropped" More Information needed
[ "# Dataset Card for \"CNAMCD_Cropped\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"CNAMCD_Cropped\"\n\nMore Information needed" ]
a3e52403751f52b454d5a6a841ca6f742285c210
# Dataset Card for "UltraMix" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bcui19/UltraMix
[ "region:us" ]
2024-02-05T22:25:30+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 108959181, "num_examples": 41457}], "download_size": 54577284, "dataset_size": 108959181}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-05T22:25:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "UltraMix" More Information needed
[ "# Dataset Card for \"UltraMix\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"UltraMix\"\n\nMore Information needed" ]
769b3478be8ac36ed9c59763673462be94077973
# Dataset Card for "WritingPrompts" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
llm-aes/writing-prompts
[ "region:us" ]
2024-02-05T22:29:33+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 32297322, "num_examples": 232360}, {"name": "sample_length_10_to_292", "num_bytes": 135779, "num_examples": 1000}], "download_size": 20998349, "dataset_size": 32433101}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "sample_length_10_to_292", "path": "data/sample_length_10_to_292-*"}]}]}
2024-02-05T22:41:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "WritingPrompts" More Information needed
[ "# Dataset Card for \"WritingPrompts\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"WritingPrompts\"\n\nMore Information needed" ]
1accdf18a190fd43e1546dea1fb53449fe0f78a8
# 2015-2016 Raw Tracking Data from SportVU The modern era of basketball is characterized by the use of data to analyze performance and make decisions both on and off the court. ## Dataset Details ### Dataset Descriptions Tracking data is the finest-grained level of basketball data, while coarser play-by-play and box score data are also commonly used. This dataset gives raw SportVU tracking data from each game of the 2015-2016 NBA season. This was the last season with publicly available tracking data. This data has the coordinates of all players at all moments of the game, for each game in the season. There are also identifiers for player id and team id, so that further analysis can be performed. - **Collected By:** SportVU - **Shared By:** Kostya Linou, Dzmitryi Linou, Martijn De Boer ### Dataset Source - **Repository:** https://github.com/linouk23/NBA-Player-Movements ## Uses This dataset has many potential uses. Primarily, visualization of plays, as illustrated in the initial repository, is possible, creating a comprehensive view for analyzing actions on court. Beyond that, models could be trained to recognize certain play types or actions, as illustrated in previous papers (see Stephanos et al., 2022). Even further, video data could be connected to each moment of collection to create a model where video frames are mapped to tracked coordinates, increasing the accessibility of tracking data, as only publicly available video footage is necessary. - Stephanos et al.: https://www.sloansportsconference.com/research-papers/using-hex-maps-to-classify-cluster-dribble-hand-off-variants-in-the-nba ## Dataset Structure <!-- complete once data is uploaded, so that one can understand how it is formatted. --> ## Dataset Creation ### Curation Rationale The reason for uploading this data to huggingface is that in its current .7z form, the data is less accessible, and requires unzipping many files and then combining to access. Also, more sources for easily accessible tracking data, even if also available elsewhere, increase the chances of long-term preservation and accessibility for future NBA fans. ### Source Data From creator StatsPerform, "the SportVU camera system is installed in basketball arenas to track the real-time positions of players and the ball at 25 times per second." These methods were used to capture the data in this dataset. ## Bias, Risks, and Limitations Since this data is not up-to-date, and the tracking data for the last eight seasons is private and unreleased, the continued spread of this specific data may not be representative of the current state of NBA tracking data. Thus, users that learn how to manipulate it may not be adequately prepared for work in basketball organizations. Further, analyses performed on the dataset may not be reflective of the current state of professional basketball. However, since this was the last iteration of publicly available tracking data, I believe increasing its availability is important. ## Dataset Card Author Donald Cayton; [email protected]
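As a concrete, hedged example of working with this kind of tracking data, the sketch below estimates one player's average speed from consecutive moments. It assumes the moment layout used by the linked repository, a game-clock timestamp plus rows of `[team_id, player_id, x, y, radius]` with coordinates in feet; verify the field order against the raw JSON before relying on it.

```python
# Hedged sketch: average speed of one player across consecutive tracking
# moments. It assumes the moment layout used by the linked repository, i.e. a
# game-clock timestamp in seconds plus rows of [team_id, player_id, x, y,
# radius] with coordinates in feet; verify against the raw JSON before use.
import math

def average_speed(moments, player_id):
    """Average speed (feet per second) of `player_id` over a list of moments.

    Each moment is assumed to be (game_clock_seconds, rows), where every row
    looks like [team_id, player_id, x, y, radius].
    """
    path = [
        (clock, row[2], row[3])
        for clock, rows in moments
        for row in rows
        if row[1] == player_id
    ]
    total_dist = total_time = 0.0
    for (t0, x0, y0), (t1, x1, y1) in zip(path, path[1:]):
        dt = abs(t1 - t0)  # the game clock counts down, so take the magnitude
        if dt > 0:
            total_dist += math.hypot(x1 - x0, y1 - y0)
            total_time += dt
    return total_dist / total_time if total_time else 0.0
```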
dcayton/nba_tracking_data_15_16
[ "source_datasets:https://github.com/linouk23/NBA-Player-Movements", "language:en", "basketball", "nba", "sports", "tracking", "region:us" ]
2024-02-05T22:57:34+00:00
{"language": ["en"], "source_datasets": "https://github.com/linouk23/NBA-Player-Movements", "pretty_name": "2015-2016 Raw Tracking Data from SportVU", "tags": ["basketball", "nba", "sports", "tracking"]}
2024-02-16T22:28:07+00:00
[]
[ "en" ]
TAGS #source_datasets-https-//github.com/linouk23/NBA-Player-Movements #language-English #basketball #nba #sports #tracking #region-us
# 2015-2016 Raw Tracking Data from SportVU The modern era of basketball is characterized by the use of data to analyze performance and make decisions both on and off the court. ## Dataset Details ### Dataset Descriptions Tracking data is the finest level of basketball data, whereas play-by-play and box score data are also used. This dataset gives raw SportVU tracking data from each game of the 2015-2016 NBA season. This was the last season with publically available tracking data. This data has the coordinates of all players at all moments of the game, for each game in the season. There are also identifiers for player id and team id, so that further analysis can be performed. - Collected By: SportVU - Shared By: Kostya Linou, Dzmitryi Linou, Martijn De Boer ### Dataset Source - Repository: URL ## Uses This dataset has many potential uses. Primarily, visualization of plays, as illustrated in the initial repository is possible, creating a comprehensive view for analyzing actions on court. Beyond that, models could be trained to recognize certain play types or actions, as illustrated in previous papers (see Stephanos et al., 2022). Even further, video data could be connected to each moment of collection to create a model where video frames are mapped to tracked coordinates, increasing the accessibility of tracking data as only publically available video footage is necessary. - Stephanos et al.: URL ## Dataset Structure ## Dataset Creation ### Curation Rationale The reason for uploading this data to huggingface, is that in its current .7z form, the data is less accessible, and requires unzipping many files and then combining to access. Also, more sources for easily accessible tracking data, even if also available elsewhere, increase the chances of long-term preservation and accessibility for future NBA fans. ### Source Data From creator StatsPerform, "the SportVU camera system is installed in basketball arenas to track the real-time positions of players and the ball at 25 times per second." These methods were used to capture the data in this dataset. ## Bias, Risks, and Limitations Since this data is not up-to-date, and the tracking data for the last eight seasons is private and unreleased, the continued spread of this specific data may not be representative of the current state of NBA tracking data. Thus, users that learn how to manipulate it may not be adequately prepared for work in basketball organizations. Further, analyses performed on the dataset may not be reflective of the current state of professional basketball. However, since this was the last iteration of publicly available tracking data, I believe increasing its availability is important. ## Dataset Card Author Donald Cayton; dcayton9@URL
[ "# 2015-2016 Raw Tracking Data from SportVU\n\nThe modern era of basketball is characterized by the use of data to analyze performance and make decisions both on and off the court.", "## Dataset Details", "### Dataset Descriptions\n\nTracking data is the finest level of basketball data, whereas play-by-play and box score data are also used. This dataset gives raw SportVU tracking data from each game of the 2015-2016 NBA season. This was the last season with publically available tracking data. This data has the coordinates of all players at all moments of the game, for each game in the season. There are also identifiers for player id and team id, so that further analysis can be performed.\n- Collected By: SportVU\n- Shared By: Kostya Linou, Dzmitryi Linou, Martijn De Boer", "### Dataset Source\n\n- Repository: URL", "## Uses\n\nThis dataset has many potential uses. Primarily, visualization of plays, as illustrated in the initial repository is possible, creating a comprehensive view for analyzing actions on court. Beyond that, models could be trained to recognize certain play types or actions, as illustrated in previous papers (see Stephanos et al., 2022). Even further, video data could be connected to each moment of collection to create a model where video frames are mapped to tracked coordinates, increasing the accessibility of tracking data as only publically available video footage is necessary. \n\n- Stephanos et al.: URL", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale\n\nThe reason for uploading this data to huggingface, is that in its current .7z form, the data is less accessible, and requires unzipping many files and then combining to access. Also, more sources for easily accessible tracking data, even if also available elsewhere, increase the chances of long-term preservation and accessibility for future NBA fans.", "### Source Data\n\nFrom creator StatsPerform, \"the SportVU camera system is installed in basketball arenas to track the real-time positions of players and the ball at 25 times per second.\" These methods were used to capture the data in this dataset.", "## Bias, Risks, and Limitations\n\nSince this data is not up-to-date, and the tracking data for the last eight seasons is private and unreleased, the continued spread of this specific data may not be representative of the current state of NBA tracking data. Thus, users that learn how to manipulate it may not be adequately prepared for work in basketball organizations. Further, analyses performed on the dataset may not be reflective of the current state of professional basketball. However, since this was the last iteration of publicly available tracking data, I believe increasing its availability is important.", "## Dataset Card Author\n\nDonald Cayton; dcayton9@URL" ]
[ "TAGS\n#source_datasets-https-//github.com/linouk23/NBA-Player-Movements #language-English #basketball #nba #sports #tracking #region-us \n", "# 2015-2016 Raw Tracking Data from SportVU\n\nThe modern era of basketball is characterized by the use of data to analyze performance and make decisions both on and off the court.", "## Dataset Details", "### Dataset Descriptions\n\nTracking data is the finest level of basketball data, whereas play-by-play and box score data are also used. This dataset gives raw SportVU tracking data from each game of the 2015-2016 NBA season. This was the last season with publically available tracking data. This data has the coordinates of all players at all moments of the game, for each game in the season. There are also identifiers for player id and team id, so that further analysis can be performed.\n- Collected By: SportVU\n- Shared By: Kostya Linou, Dzmitryi Linou, Martijn De Boer", "### Dataset Source\n\n- Repository: URL", "## Uses\n\nThis dataset has many potential uses. Primarily, visualization of plays, as illustrated in the initial repository is possible, creating a comprehensive view for analyzing actions on court. Beyond that, models could be trained to recognize certain play types or actions, as illustrated in previous papers (see Stephanos et al., 2022). Even further, video data could be connected to each moment of collection to create a model where video frames are mapped to tracked coordinates, increasing the accessibility of tracking data as only publically available video footage is necessary. \n\n- Stephanos et al.: URL", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale\n\nThe reason for uploading this data to huggingface, is that in its current .7z form, the data is less accessible, and requires unzipping many files and then combining to access. Also, more sources for easily accessible tracking data, even if also available elsewhere, increase the chances of long-term preservation and accessibility for future NBA fans.", "### Source Data\n\nFrom creator StatsPerform, \"the SportVU camera system is installed in basketball arenas to track the real-time positions of players and the ball at 25 times per second.\" These methods were used to capture the data in this dataset.", "## Bias, Risks, and Limitations\n\nSince this data is not up-to-date, and the tracking data for the last eight seasons is private and unreleased, the continued spread of this specific data may not be representative of the current state of NBA tracking data. Thus, users that learn how to manipulate it may not be adequately prepared for work in basketball organizations. Further, analyses performed on the dataset may not be reflective of the current state of professional basketball. However, since this was the last iteration of publicly available tracking data, I believe increasing its availability is important.", "## Dataset Card Author\n\nDonald Cayton; dcayton9@URL" ]
ec30cc48fd29d22ea9887a31317829e72ed7317b
# danbooru tag txt This is for non-AI developers. It's a simple list of tags in a human-friendly format. AI developers should use other datasets, including the output from this dataset. ## dataset Extracted from the following dataset: https://huggingface.co/datasets/AngelBottomless/danbooru-2023-sqlite-fixed-7110548
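For readers who want to regenerate such a list themselves, the sketch below shows one way an extraction like this could be done from the SQLite source. The table and column names (`tags`, `name`) are assumptions, since the source schema is not described here; check the actual database before running it.

```python
# Rough sketch of the kind of extraction described above: dump tag names from
# the SQLite source into a plain text file, one tag per line. The table and
# column names ("tags", "name") are assumptions; check the actual schema of
# the source database before running this.
import sqlite3

def dump_tags(db_path: str, out_path: str) -> int:
    con = sqlite3.connect(db_path)
    try:
        rows = con.execute("SELECT name FROM tags ORDER BY name").fetchall()
    finally:
        con.close()
    with open(out_path, "w", encoding="utf-8") as f:
        for (name,) in rows:
            f.write(f"{name}\n")
    return len(rows)  # number of tags written
```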
gegebo/danobooru2023_tag_txt
[ "license:mit", "region:us" ]
2024-02-05T23:10:12+00:00
{"license": "mit"}
2024-02-05T23:34:47+00:00
[]
[]
TAGS #license-mit #region-us
# danbooru tag txt This is for non-AI developers. It's a simple list of tags in a human-friendly format. AI developers should use other datasets, including the output from this dataset. ## dataset Extracted from the following dataset: URL
[ "# danbooru tag txt\n \nThis is for non-AI developers. \nIt's a simple list of tags in a human-friendly format. \n \nAI developers should use other datasets, including the output from this dataset.", "## dataset \n \nExtracted from the following data set \nURL" ]
[ "TAGS\n#license-mit #region-us \n", "# danbooru tag txt\n \nThis is for non-AI developers. \nIt's a simple list of tags in a human-friendly format. \n \nAI developers should use other datasets, including the output from this dataset.", "## dataset \n \nExtracted from the following data set \nURL" ]
a8466c84c59b8e1458821394983f43fc18e41adb
# Changes 1. Reformatted into ShareGPT. 2. Removed the few duplicate characters. 3. Removed samples with no messages. 4. Any messages where the character talks in third person are fixed to be first person, as they should be.
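For context, the ShareGPT layout that change 1 refers to is conventionally a list of turns keyed by `from` and `value`, as in the hedged illustration below; the exact keys and role names used by this particular dataset are an assumption.

```python
# Illustration only: the conventional ShareGPT layout, a list of turns keyed by
# "from" and "value". The exact keys, role names, and system-prompt handling
# used by this particular dataset are assumptions.
example = {
    "conversations": [
        {"from": "system", "value": "You are Mira, a sarcastic starship mechanic."},
        {"from": "human", "value": "Can you get the engines running before the storm hits?"},
        {"from": "gpt", "value": "I can, assuming you stop hovering over my shoulder."},
    ]
}

# A simple check in the spirit of change 4: flag character replies that start
# in the third person instead of the first person.
for turn in example["conversations"]:
    if turn["from"] == "gpt" and turn["value"].lower().startswith(("she ", "he ", "they ")):
        print("third-person reply detected:", turn["value"])
```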
PJMixers/hieunguyenminh_roleplay-ShareGPT
[ "source_datasets:hieunguyenminh/roleplay", "language:en", "region:us" ]
2024-02-06T00:53:06+00:00
{"language": ["en"], "source_datasets": "hieunguyenminh/roleplay"}
2024-02-06T14:20:42+00:00
[]
[ "en" ]
TAGS #source_datasets-hieunguyenminh/roleplay #language-English #region-us
# Changes 1. Reformatted into ShareGPT. 2. Removed the few duplicate characters. 3. Removed samples with no messages. 4. Any messages where the character talks in third person are fixed to be first person, as they should be.
[ "# Changes\n1. Reformatted into ShareGPT.\n2. Removed the few duplicate characters.\n3. Removed samples with no messages.\n4. Any messages where the character is talking in third person is fixed to be first person, as it should be." ]
[ "TAGS\n#source_datasets-hieunguyenminh/roleplay #language-English #region-us \n", "# Changes\n1. Reformatted into ShareGPT.\n2. Removed the few duplicate characters.\n3. Removed samples with no messages.\n4. Any messages where the character is talking in third person is fixed to be first person, as it should be." ]
4cace9489f45a22fb0475f88686cb0a7f6c9a6d3
# Dataset Card for "EleutherAI_pythia-1b-deduped__dpo_on_policy__tldr" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vwxyzjn/EleutherAI_pythia-1b-deduped__dpo_on_policy__tldr
[ "region:us" ]
2024-02-06T01:25:06+00:00
{"dataset_info": {"features": [{"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}, {"name": "chosen_token", "sequence": "int64"}, {"name": "rejected_token", "sequence": "int64"}, {"name": "chosen_token_label", "sequence": "int64"}, {"name": "rejected_token_label", "sequence": "int64"}], "splits": [{"name": "dpo_on_policy__1__1707191080", "num_bytes": 5903392, "num_examples": 256}, {"name": "dpo_on_policy__1__1707191514", "num_bytes": 737346, "num_examples": 32}, {"name": "dpo_on_policy__1__1707191827", "num_bytes": 1474271, "num_examples": 64}, {"name": "dpo_on_policy__1__1707191954", "num_bytes": 5903392, "num_examples": 256}, {"name": "dpo_on_policy__1__1707192216", "num_bytes": 5903392, "num_examples": 256}, {"name": "dpo_on_policy__1__1707192515", "num_bytes": 5903178, "num_examples": 256}, {"name": "dpo_on_policy__1__1707200734", "num_bytes": 2686492433, "num_examples": 116480}, {"name": "dpo_on_policy__1__1707792349", "num_bytes": 2686492433, "num_examples": 116480}, {"name": "dpo_on_policy__1__1707792340", "num_bytes": 2686492433, "num_examples": 116480}, {"name": "epoch_1", "num_bytes": 2691157952, "num_examples": 116480}, {"name": "dpo_on_policy__1__1707795707", "num_bytes": 2686492433, "num_examples": 116480}, {"name": "epoch_2", "num_bytes": 2722175510, "num_examples": 116480}, {"name": "epoch_3", "num_bytes": 2690611469, "num_examples": 116480}, {"name": "dpo_on_policy__1__1707833448", "num_bytes": 2686492433, "num_examples": 116480}, {"name": "dpo_on_policy__1__1707833448epoch_1", "num_bytes": 2691512798, "num_examples": 116480}], "download_size": 3859313963, "dataset_size": 24253744865}}
2024-02-13T15:35:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "EleutherAI_pythia-1b-deduped__dpo_on_policy__tldr" More Information needed
[ "# Dataset Card for \"EleutherAI_pythia-1b-deduped__dpo_on_policy__tldr\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"EleutherAI_pythia-1b-deduped__dpo_on_policy__tldr\"\n\nMore Information needed" ]
c1c0bfc74c1f517e7a2427deb2822539ab0f5965
# Dataset Card for "ShareMix" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bcui19/ShareMix
[ "region:us" ]
2024-02-06T01:49:57+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 150572649, "num_examples": 58483}], "download_size": 79490926, "dataset_size": 150572649}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T01:50:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "ShareMix" More Information needed
[ "# Dataset Card for \"ShareMix\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"ShareMix\"\n\nMore Information needed" ]
52eeeca738b76519e05240b75a101224bc993777
# Dataset Card for "FINCH_TRAIN_FULL" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
FINNUMBER/FINCH_TRAIN_TEMP
[ "region:us" ]
2024-02-06T01:51:46+00:00
{"dataset_info": {"features": [{"name": "task", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 285437762, "num_examples": 76609}], "download_size": 105566705, "dataset_size": 285437762}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T01:52:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "FINCH_TRAIN_FULL" More Information needed
[ "# Dataset Card for \"FINCH_TRAIN_FULL\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"FINCH_TRAIN_FULL\"\n\nMore Information needed" ]
1a74c38ba21e9d67b9ed23a124624f3a213d0c1d
# Dataset Card for Evaluation run of TomGrc/FN-OpenLLM_2x72B_MoE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FN-OpenLLM_2x72B_MoE](https://huggingface.co/TomGrc/FN-OpenLLM_2x72B_MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FN-OpenLLM_2x72B_MoE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-06T01:52:10.589662](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FN-OpenLLM_2x72B_MoE/blob/main/results_2024-02-06T01-52-10.589662.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.22887713151218728, "acc_stderr": 0.02978691747050183, "acc_norm": 0.22891869657995814, "acc_norm_stderr": 0.03057236693631619, "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080514, "mc2": 0.48471292342924077, "mc2_stderr": 0.016304873353404845 }, "harness|arc:challenge|25": { "acc": 0.2098976109215017, "acc_stderr": 0.011900548748047444, "acc_norm": 0.2551194539249147, "acc_norm_stderr": 0.012739038695202104 }, "harness|hellaswag|10": { "acc": 0.25562636924915355, "acc_stderr": 0.004353212146198434, "acc_norm": 0.2523401712806214, "acc_norm_stderr": 0.004334676952703859 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066656, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066656 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.02605529690115292, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.02605529690115292 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 
0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198813, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198813 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.036001056927277716, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.036001056927277716 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333337, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333337 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2358974358974359, "acc_stderr": 0.021525965407408726, "acc_norm": 0.2358974358974359, "acc_norm_stderr": 0.021525965407408726 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.30493273542600896, "acc_stderr": 0.030898610882477515, "acc_norm": 0.30493273542600896, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.039418975265163025, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.039418975265163025 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.28703703703703703, "acc_stderr": 0.043733130409147614, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.21359223300970873, "acc_stderr": 0.040580420156460344, "acc_norm": 0.21359223300970873, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.26495726495726496, "acc_stderr": 0.028911208802749482, "acc_norm": 0.26495726495726496, "acc_norm_stderr": 0.028911208802749482 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24393358876117496, "acc_stderr": 0.015357212665829468, "acc_norm": 0.24393358876117496, "acc_norm_stderr": 0.015357212665829468 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.18006430868167203, "acc_stderr": 0.021823422857744953, "acc_norm": 0.18006430868167203, "acc_norm_stderr": 0.021823422857744953 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2434640522875817, "acc_stderr": 0.017362473762146634, "acc_norm": 0.2434640522875817, "acc_norm_stderr": 0.017362473762146634 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721376, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721376 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03218093795602357, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03218093795602357 }, "harness|truthfulqa:mc|0": { "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080514, "mc2": 0.48471292342924077, "mc2_stderr": 0.016304873353404845 }, "harness|winogrande|5": { "acc": 0.4972375690607735, "acc_stderr": 0.014052271211616445 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TomGrc__FN-OpenLLM_2x72B_MoE
[ "region:us" ]
2024-02-06T01:54:17+00:00
{"pretty_name": "Evaluation run of TomGrc/FN-OpenLLM_2x72B_MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FN-OpenLLM_2x72B_MoE](https://huggingface.co/TomGrc/FN-OpenLLM_2x72B_MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FN-OpenLLM_2x72B_MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-06T01:52:10.589662](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FN-OpenLLM_2x72B_MoE/blob/main/results_2024-02-06T01-52-10.589662.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.22887713151218728,\n \"acc_stderr\": 0.02978691747050183,\n \"acc_norm\": 0.22891869657995814,\n \"acc_norm_stderr\": 0.03057236693631619,\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080514,\n \"mc2\": 0.48471292342924077,\n \"mc2_stderr\": 0.016304873353404845\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2098976109215017,\n \"acc_stderr\": 0.011900548748047444,\n \"acc_norm\": 0.2551194539249147,\n \"acc_norm_stderr\": 0.012739038695202104\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.25562636924915355,\n \"acc_stderr\": 0.004353212146198434,\n \"acc_norm\": 0.2523401712806214,\n \"acc_norm_stderr\": 0.004334676952703859\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.03785714465066656,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.03785714465066656\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2339622641509434,\n \"acc_stderr\": 0.02605529690115292,\n \"acc_norm\": 0.2339622641509434,\n \"acc_norm_stderr\": 0.02605529690115292\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.17,\n 
\"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.030631145539198813,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.030631145539198813\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.036001056927277716,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.036001056927277716\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03333333333333337,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03333333333333337\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2358974358974359,\n \"acc_stderr\": 0.021525965407408726,\n \"acc_norm\": 0.2358974358974359,\n \"acc_norm_stderr\": 0.021525965407408726\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.30493273542600896,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.30493273542600896,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.28703703703703703,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.28703703703703703,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.21359223300970873,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.21359223300970873,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.26495726495726496,\n \"acc_stderr\": 0.028911208802749482,\n \"acc_norm\": 0.26495726495726496,\n \"acc_norm_stderr\": 0.028911208802749482\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.24393358876117496,\n 
\"acc_stderr\": 0.015357212665829468,\n \"acc_norm\": 0.24393358876117496,\n \"acc_norm_stderr\": 0.015357212665829468\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.18006430868167203,\n \"acc_stderr\": 0.021823422857744953,\n \"acc_norm\": 0.18006430868167203,\n \"acc_norm_stderr\": 0.021823422857744953\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2434640522875817,\n \"acc_stderr\": 0.017362473762146634,\n \"acc_norm\": 0.2434640522875817,\n \"acc_norm_stderr\": 0.017362473762146634\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721376,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721376\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n \"acc_stderr\": 0.03529486801511115,\n \"acc_norm\": 0.2891566265060241,\n \"acc_norm_stderr\": 0.03529486801511115\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03218093795602357,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03218093795602357\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080514,\n \"mc2\": 0.48471292342924077,\n \"mc2_stderr\": 0.016304873353404845\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4972375690607735,\n \"acc_stderr\": 0.014052271211616445\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/TomGrc/FN-OpenLLM_2x72B_MoE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|arc:challenge|25_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|gsm8k|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hellaswag|10_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-06T01-52-10.589662.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-06T01-52-10.589662.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-06T01-52-10.589662.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-06T01-52-10.589662.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-06T01-52-10.589662.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_06T01_52_10.589662", "path": ["**/details_harness|winogrande|5_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-06T01-52-10.589662.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_06T01_52_10.589662", "path": ["results_2024-02-06T01-52-10.589662.parquet"]}, {"split": "latest", "path": ["results_2024-02-06T01-52-10.589662.parquet"]}]}]}
2024-02-06T01:54:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FN-OpenLLM_2x72B_MoE Dataset automatically created during the evaluation run of model TomGrc/FN-OpenLLM_2x72B_MoE on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-06T01:52:10.589662(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of TomGrc/FN-OpenLLM_2x72B_MoE\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FN-OpenLLM_2x72B_MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-06T01:52:10.589662(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FN-OpenLLM_2x72B_MoE\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FN-OpenLLM_2x72B_MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-06T01:52:10.589662(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
8e0f7354145c35091372038302c84002b8cfb6bd
# πŸš€ Load Dataset ```python from datasets import load_dataset dataset = load_dataset("shuyuej/paraphrasing_cot") dataset = dataset["train"] print(dataset) ```
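A short follow-up sketch for inspecting what the loaded split contains; the card does not document the column schema, so the code below discovers it at runtime rather than assuming any particular field names:

```python
from datasets import load_dataset

# Load the train split exactly as shown in the card above.
dataset = load_dataset("shuyuej/paraphrasing_cot")["train"]

# The schema is not documented here, so list the columns before relying on them.
print(dataset.column_names)

# Peek at one record to see how each paraphrased chain-of-thought example is stored.
print(dataset[0])
```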
shuyuej/paraphrasing_cot
[ "license:apache-2.0", "region:us" ]
2024-02-06T02:21:36+00:00
{"license": "apache-2.0"}
2024-02-06T02:22:39+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Load Dataset
[ "# Load Dataset" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Load Dataset" ]
edfd495055c4d8487b7146db7e335b70b195dc38
# Dataset Card for "FINCH_TEST" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
FINNUMBER/FINCH_TEST_FULL
[ "region:us" ]
2024-02-06T02:31:34+00:00
{"dataset_info": {"features": [{"name": "task", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "instruction", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 118761883, "num_examples": 34092}], "download_size": 44992827, "dataset_size": 118761883}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T02:31:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for "FINCH_TEST" More Information needed
[ "# Dataset Card for \"FINCH_TEST\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"FINCH_TEST\"\n\nMore Information needed" ]
5c2cfeca888f0885a48b3593777612a005b471b6
# Dataset Card for KABR Telemetry: In-Situ Dataset for Kenyan Animal Behavior Recognition from Drone Videos ## Dataset Details ### Dataset Description This dataset contains the drone telemetry data associated with the [KABR](https://huggingface.co/datasets/imageomics/KABR) dataset. The KABR dataset contains annotated video behavior of zebras and giraffes at the Mpala Research Centre. This telemetry dataset contains information about the status of the drone during the missions, including location and altitude, along with the bounding box dimensions of the wildlife in the frame and behavior annotation information. Please see the "kabr_telemetry_metadata.csv" for more details. - **Paper:** [Integrating Biological Data into Autonomous Remote Sensing Systems for In Situ Imageomics: A Case Study for Kenyan Animal Behavior Sensing with Unmanned Aerial Vehicles (UAVs)](https://jennamk14.github.io/images/Integrating%20Biological%20Data%20into%20Autonomous%20Remote%20Sensing%20Systems%20for%20In%20Situ%20Imageomics-%20A%20Case%20Study%20for%20Kenyan%20Animal%20Behavior%20Sensing%20with%20Unmanned%20Aerial%20Vehicles%20(UAVs).pdf) - **Point of Contact:** Jenna Kline, [email protected] - **Curated by:** Jenna Kline, Maksim Kholiavchenko, Otto Brookes, Tanya Berger-Wolf, Charles V. Stewart, and Christopher Stewart - **Funded by:** Imageomics - **Shared by:** Jenna Kline ## Uses This dataset is intended to be used to provide guidance on executing wildlife behavior collection missions with drones, which can be conducted by drone pilots manually, or integrated into an autonomous navigation framework. ## Dataset Creation ### Curation Rationale This dataset was curated to provide additional context to the KABR dataset, and provide spatial information which can be used to develop autonomous navigation algorithms for wildlife data collection. #### Data Collection and Processing This data was collected at the Mpala Research Centre in Laikipia, Kenya in January 2023. A DJI Mavic Air 2 drone was used to collect the data, and [AirData](https://airdata.com/) was used to process DJI telemetry files. ### Annotations Please refer to the [KABR](https://huggingface.co/datasets/imageomics/KABR) dataset and associated paper for details on the annotation process. ## Additional Information ### Authors * Jenna Kline (The Ohio State University) * Maksim Kholiavchenko (Rensselaer Polytechnic Institute) - ORCID: 0000-0001-6757-1957 * Otto Brookes (University of Bristol) * Tanya Berger-Wolf (The Ohio State University) - ORCID: 0000-0001-7610-1412 * Charles V. Stewart (Rensselaer Polytechnic Institute) * Christopher Stewart (The Ohio State University) ### Licensing Information This dataset is dedicated to the public domain for the benefit of scientific pursuits. We ask that you cite the dataset and journal paper using the below citations if you make use of it in your research. ### Citation Information #### Dataset ``` @misc{KABR_telemetry, author = {Kline, Jenna and Kholiavchenko, Maksim and Berger-Wolf, Tanya and Stewart, Charles V. 
and Stewart, Christopher}, title = {KABR Telemetry}, year = {2024}, url = {https://huggingface.co/datasets/imageomics/KABR-telemetry}, doi = {doi:10.57967/hf/1745}, publisher = {Hugging Face} } ``` #### Paper ``` @inproceedings{kline_kabr_telemetry, title={Integrating Biological Data into Autonomous Remote Sensing Systems for In Situ Imageomics: A Case Study for Kenyan Animal Behavior Sensing with Unmanned Aerial Vehicles (UAVs)}, author={Kline, Jenna and Kholiavchenko, Maksim and Berger-Wolf, Tanya and Stewart, Charles V. and Stewart, Christopher}, booktitle={Proceedings of the First Workshop on Imageomics: Discovering Biological Knowledge from Images using AI, held as part of AAAI 24}, year={2024} } ``` ### Contributions The [Imageomics Institute](https://imageomics.org) is funded by the US National Science Foundation's Harnessing the Data Revolution (HDR) program under [Award #2118240](https://www.nsf.gov/awardsearch/showAward?AWD_ID=2118240) (Imageomics: A New Frontier of Biological Information Powered by Knowledge-Guided Machine Learning). Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation.
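A minimal sketch of loading the two configurations with the `datasets` library. The config names ("telemetry_data" and "column_information") come from this record's metadata; the `train` split name is the library's usual default for CSV-backed configs and is an assumption here rather than something the card states:

```python
from datasets import load_dataset

# Telemetry records: drone status (location, altitude), bounding-box dimensions,
# and behavior-annotation fields, as described above.
telemetry = load_dataset("imageomics/KABR-telemetry", "telemetry_data", split="train")

# Column descriptions from "kabr_telemetry_metadata.csv", exposed as its own config.
column_info = load_dataset("imageomics/KABR-telemetry", "column_information", split="train")

print(telemetry.column_names)
print(column_info[0])
```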
imageomics/KABR-telemetry
[ "task_categories:robotics", "size_categories:100K<n<1M", "language:en", "license:cc0-1.0", "drone", "ecology", "zebra", "Grevy's zebra", "conservation", "UAV", "doi:10.57967/hf/1745", "region:us" ]
2024-02-06T03:40:08+00:00
{"language": ["en"], "license": "cc0-1.0", "size_categories": ["100K<n<1M"], "task_categories": ["robotics"], "tags": ["drone", "ecology", "zebra", "Grevy's zebra", "conservation", "UAV"], "configs": [{"config_name": "telemetry_data", "data_files": "consolidated_metadata.csv", "default": true}, {"config_name": "column_information", "data_files": "kabr_telemetry_metadata.csv"}]}
2024-02-09T04:20:14+00:00
[]
[ "en" ]
TAGS #task_categories-robotics #size_categories-100K<n<1M #language-English #license-cc0-1.0 #drone #ecology #zebra #Grevy's zebra #conservation #UAV #doi-10.57967/hf/1745 #region-us
# Dataset Card for KABR Telemetry: In-Situ Dataset for Kenyan Animal Behavior Recognition from Drone Videos ## Dataset Details ### Dataset Description This dataset contains the drone telemetry data associated with the KABR dataset. The KABR dataset contains annotated video behavior of zebras and giraffes at the Mpala Research Centre. This telemetry dataset contains information about the status drone during the missions, including location and altitude, along with the bounding box dimensions of the wildlife in the frame and behavior annotation information. Please see the "kabr_telemetry_metadata.csv" for more details. - Paper: Integrating Biological Data into Autonomous Remote Sensing Systems for In Situ Imageomics: A Case Study for Kenyan Animal Behavior Sensing with Unmanned Aerial Vehicles (UAVs).pdf) - Point of Contact: Jenna Kline, kline.377@URL - Curated by: Jenna Kline, Maksim Kholiavchenko, Otto Brookes, Tanya Berger-Wolf, Charles V. Stewart, and Christopher Stewart - Funded by: Imageomics - Shared by: Jenna Kline ## Uses This dataset is intended to be used to provide guidance on executing wildlife behavior collection missions with drones, which can be conducted by drone pilots manually, or integrated into an autonomous navigation framework. ## Dataset Creation ### Curation Rationale This dataset was curated to provide additional context to the KABR dataset, and provide spatial information which can be used to develop autonomous navigation algorithms for wildlife data collection. #### Data Collection and Processing This data was collected at the Mpala Research Centre in Laikipia, Kenya in January 2023. A DJI Mavic Air 2 drone was used to collect the data, and AirData was used to process DJI telemetry files. ### Annotations Please refer to the KABR) dataset and associated paper for details on the annotation process. ## Additional Information ### Authors * Jenna Kline (The Ohio State University) * Maksim Kholiavchenko (Rensselaer Polytechnic Institute) - ORCID: 0000-0001-6757-1957 * Otto Brookes (University of Bristol) * Tanya Berger-Wolf (The Ohio State University) - ORCID: 0000-0001-7610-1412 * Charles V. Stewart (Rensselaer Polytechnic Institute) * Christopher Stewart (The Ohio State University) ### Licensing Information This dataset is dedicated to the public domain for the benefit of scientific pursuits. We ask that you cite the dataset and journal paper using the below citations if you make use of it in your research. #### Dataset #### Paper ### Contributions The Imageomics Institute is funded by the US National Science Foundation's Harnessing the Data Revolution (HDR) program under Award #2118240 (Imageomics: A New Frontier of Biological Information Powered by Knowledge-Guided Machine Learning). Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation.
[ "# Dataset Card for KABR Telemetry: In-Situ Dataset for Kenyan Animal Behavior Recognition from Drone Videos", "## Dataset Details", "### Dataset Description\n\nThis dataset contains the drone telemetry data associated with the KABR dataset. The KABR dataset contains annotated video behavior of zebras and giraffes at the Mpala Research Centre. This telemetry dataset contains information about the status drone during the missions, including location and altitude, along with the bounding box dimensions of the wildlife in the frame and behavior annotation information. Please see the \"kabr_telemetry_metadata.csv\" for more details.\n\n- Paper: Integrating Biological Data into Autonomous Remote Sensing Systems for In Situ Imageomics: A Case Study for Kenyan Animal Behavior Sensing with Unmanned Aerial Vehicles (UAVs).pdf)\n- Point of Contact: Jenna Kline, kline.377@URL \n\n- Curated by: Jenna Kline, Maksim Kholiavchenko, Otto Brookes, Tanya Berger-Wolf, Charles V. Stewart, and Christopher Stewart\n- Funded by: Imageomics\n- Shared by: Jenna Kline", "## Uses\n\nThis dataset is intended to be used to provide guidance on executing wildlife behavior collection missions with drones, which can be conducted by drone pilots manually, or integrated into an autonomous navigation framework.", "## Dataset Creation", "### Curation Rationale\n\nThis dataset was curated to provide additional context to the KABR dataset, and provide spatial information which can be used to develop autonomous navigation algorithms for wildlife data collection.", "#### Data Collection and Processing\n\nThis data was collected at the Mpala Research Centre in Laikipia, Kenya in January 2023. A DJI Mavic Air 2 drone was used to collect the data, and AirData was used to process DJI telemetry files.", "### Annotations\n\nPlease refer to the KABR) dataset and associated paper for details on the annotation process.", "## Additional Information", "### Authors\n\n* Jenna Kline (The Ohio State University)\n* Maksim Kholiavchenko (Rensselaer Polytechnic Institute) - ORCID: 0000-0001-6757-1957\n* Otto Brookes (University of Bristol)\n* Tanya Berger-Wolf (The Ohio State University) - ORCID: 0000-0001-7610-1412\n* Charles V. Stewart (Rensselaer Polytechnic Institute)\n* Christopher Stewart (The Ohio State University)", "### Licensing Information\n\nThis dataset is dedicated to the public domain for the benefit of scientific pursuits. We ask that you cite the dataset and journal paper using the below citations if you make use of it in your research.", "#### Dataset", "#### Paper", "### Contributions\n\nThe Imageomics Institute is funded by the US National Science Foundation's Harnessing the Data Revolution (HDR) program under Award #2118240 (Imageomics: A New Frontier of Biological Information Powered by Knowledge-Guided Machine Learning). Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation." ]
[ "TAGS\n#task_categories-robotics #size_categories-100K<n<1M #language-English #license-cc0-1.0 #drone #ecology #zebra #Grevy's zebra #conservation #UAV #doi-10.57967/hf/1745 #region-us \n", "# Dataset Card for KABR Telemetry: In-Situ Dataset for Kenyan Animal Behavior Recognition from Drone Videos", "## Dataset Details", "### Dataset Description\n\nThis dataset contains the drone telemetry data associated with the KABR dataset. The KABR dataset contains annotated video behavior of zebras and giraffes at the Mpala Research Centre. This telemetry dataset contains information about the status drone during the missions, including location and altitude, along with the bounding box dimensions of the wildlife in the frame and behavior annotation information. Please see the \"kabr_telemetry_metadata.csv\" for more details.\n\n- Paper: Integrating Biological Data into Autonomous Remote Sensing Systems for In Situ Imageomics: A Case Study for Kenyan Animal Behavior Sensing with Unmanned Aerial Vehicles (UAVs).pdf)\n- Point of Contact: Jenna Kline, kline.377@URL \n\n- Curated by: Jenna Kline, Maksim Kholiavchenko, Otto Brookes, Tanya Berger-Wolf, Charles V. Stewart, and Christopher Stewart\n- Funded by: Imageomics\n- Shared by: Jenna Kline", "## Uses\n\nThis dataset is intended to be used to provide guidance on executing wildlife behavior collection missions with drones, which can be conducted by drone pilots manually, or integrated into an autonomous navigation framework.", "## Dataset Creation", "### Curation Rationale\n\nThis dataset was curated to provide additional context to the KABR dataset, and provide spatial information which can be used to develop autonomous navigation algorithms for wildlife data collection.", "#### Data Collection and Processing\n\nThis data was collected at the Mpala Research Centre in Laikipia, Kenya in January 2023. A DJI Mavic Air 2 drone was used to collect the data, and AirData was used to process DJI telemetry files.", "### Annotations\n\nPlease refer to the KABR) dataset and associated paper for details on the annotation process.", "## Additional Information", "### Authors\n\n* Jenna Kline (The Ohio State University)\n* Maksim Kholiavchenko (Rensselaer Polytechnic Institute) - ORCID: 0000-0001-6757-1957\n* Otto Brookes (University of Bristol)\n* Tanya Berger-Wolf (The Ohio State University) - ORCID: 0000-0001-7610-1412\n* Charles V. Stewart (Rensselaer Polytechnic Institute)\n* Christopher Stewart (The Ohio State University)", "### Licensing Information\n\nThis dataset is dedicated to the public domain for the benefit of scientific pursuits. We ask that you cite the dataset and journal paper using the below citations if you make use of it in your research.", "#### Dataset", "#### Paper", "### Contributions\n\nThe Imageomics Institute is funded by the US National Science Foundation's Harnessing the Data Revolution (HDR) program under Award #2118240 (Imageomics: A New Frontier of Biological Information Powered by Knowledge-Guided Machine Learning). Any opinions, findings and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation." ]
f698c7c3f30641350f7fe0a96d0df7307e336e1f
The "frames" column of this dataset holds a sequence of images, where each sequence represents one video. Each video depicts a virtual hand, animated in Unity, tracing out sketches from the [Quick, Draw!](https://quickdraw.withgoogle.com) dataset. The "sketch" column shows the original sketch, and the "label" column is the prompt given to the artist before each sketch was drawn.
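A hedged sketch of reading one example with the `datasets` library; the "train" split name is taken from the metadata below, and the frame-export loop is only an illustrative assumption about how the decoded images might be used.

```python
from datasets import load_dataset

# Single "train" split declared in the dataset metadata.
ds = load_dataset("hxgrace/syntheticHands269", split="train")

example = ds[0]
print(example["label"])               # prompt given to the artist
example["sketch"].save("sketch.png")  # original Quick, Draw! sketch, decoded as a PIL image

# "frames" decodes to a list of PIL images -- one hand-drawing animation per example.
for i, frame in enumerate(example["frames"][:5]):
    frame.save(f"frame_{i:03d}.png")
```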
hxgrace/syntheticHands269
[ "region:us" ]
2024-02-06T05:05:35+00:00
{"dataset_info": {"features": [{"name": "frames", "sequence": "image"}, {"name": "sketch", "dtype": "image"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 507943100.0, "num_examples": 269}], "download_size": 3596173, "dataset_size": 507943100.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-07T02:12:49+00:00
[]
[]
TAGS #region-us
The "frames" column of this dataset holds a sequence of images, where each sequence represents one video. Each video depicts a virtual hand, animated in Unity, tracing out sketches from the Quick, Draw! dataset. The "sketch" column shows the original sketch, and the "label" column is the prompt given to the artist before each sketch was drawn.
[]
[ "TAGS\n#region-us \n" ]
bf00a3dcbde13cd0ea4cc977f3d78785b6f4ecf1
# Dataset Card for "lmind_nq_train10000_eval6489_v1_ic_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train10000_eval6489_v1_ic_qa
[ "region:us" ]
2024-02-06T05:47:42+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_ic_qa", "path": "data/train_ic_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_ic_qa", "path": "data/eval_ic_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 1159729, "num_examples": 10000}, {"name": "train_ic_qa", "num_bytes": 7563876, "num_examples": 10000}, {"name": "train_recite_qa", "num_bytes": 7573876, "num_examples": 10000}, {"name": "eval_qa", "num_bytes": 752802, "num_examples": 6489}, {"name": "eval_ic_qa", "num_bytes": 4906186, "num_examples": 6489}, {"name": "eval_recite_qa", "num_bytes": 4912675, "num_examples": 6489}, {"name": "all_docs", "num_bytes": 9144930, "num_examples": 14014}, {"name": "all_docs_eval", "num_bytes": 9144126, "num_examples": 14014}, {"name": "train", "num_bytes": 7563876, "num_examples": 10000}, {"name": "validation", "num_bytes": 4906186, "num_examples": 6489}], "download_size": 9446520, "dataset_size": 57628262}}
2024-02-06T05:57:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train10000_eval6489_v1_ic_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_ic_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train10000_eval6489_v1_ic_qa\"\n\nMore Information needed" ]
6c3e1ffb4a021de567d5156d2c4bef97151148a6
# Dataset Card for "legalfi" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
sugeun/legalfi
[ "region:us" ]
2024-02-06T05:47:58+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 166499295, "num_examples": 157433}], "download_size": 71556009, "dataset_size": 166499295}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-06T05:49:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "legalfi" More Information needed
[ "# Dataset Card for \"legalfi\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"legalfi\"\n\nMore Information needed" ]
3d0b7c2e9c35523a0daacd69821b1816b24afd7d
# Dataset Card for "metatree_mfeat_fourier" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_mfeat_fourier
[ "region:us" ]
2024-02-06T05:53:37+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 862872, "num_examples": 1374}, {"name": "validation", "num_bytes": 393128, "num_examples": 626}], "download_size": 0, "dataset_size": 1256000}}
2024-02-06T05:54:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_mfeat_fourier" More Information needed
[ "# Dataset Card for \"metatree_mfeat_fourier\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_mfeat_fourier\"\n\nMore Information needed" ]
f09e8848e19ab0ba5ad8d3213018e74178af7508
# Dataset Card for "metatree_mfeat_zernike" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_mfeat_zernike
[ "region:us" ]
2024-02-06T05:54:58+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 553212, "num_examples": 1397}, {"name": "validation", "num_bytes": 238788, "num_examples": 603}], "download_size": 948784, "dataset_size": 792000}}
2024-02-06T05:55:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_mfeat_zernike" More Information needed
[ "# Dataset Card for \"metatree_mfeat_zernike\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_mfeat_zernike\"\n\nMore Information needed" ]
0fbc48847891921e324333c9cdea45d76e390030
# Dataset Card for "metatree_mfeat_morphological" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_mfeat_morphological
[ "region:us" ]
2024-02-06T05:55:14+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 94656, "num_examples": 1392}, {"name": "validation", "num_bytes": 41344, "num_examples": 608}], "download_size": 113815, "dataset_size": 136000}}
2024-02-06T05:55:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_mfeat_morphological" More Information needed
[ "# Dataset Card for \"metatree_mfeat_morphological\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_mfeat_morphological\"\n\nMore Information needed" ]
1d50754149dc5342e8f7d01bcdb81f45fc0ddbf0
# Dataset Card for "metatree_mfeat_karhunen" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_mfeat_karhunen
[ "region:us" ]
2024-02-06T05:55:29+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 741076, "num_examples": 1393}, {"name": "validation", "num_bytes": 322924, "num_examples": 607}], "download_size": 1290598, "dataset_size": 1064000}}
2024-02-06T05:55:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_mfeat_karhunen" More Information needed
[ "# Dataset Card for \"metatree_mfeat_karhunen\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_mfeat_karhunen\"\n\nMore Information needed" ]
e45d083f60bacbdc455313ff6d442ca72274b78c
# Dataset Card for "metatree_page_blocks" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_page_blocks
[ "region:us" ]
2024-02-06T05:55:44+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 386300, "num_examples": 3863}, {"name": "validation", "num_bytes": 161000, "num_examples": 1610}], "download_size": 367575, "dataset_size": 547300}}
2024-02-06T05:55:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_page_blocks" More Information needed
[ "# Dataset Card for \"metatree_page_blocks\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_page_blocks\"\n\nMore Information needed" ]
4490920077a516982c06442ca42cfad6b82ab1f0
# Dataset Card for "metatree_optdigits" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_optdigits
[ "region:us" ]
2024-02-06T05:55:59+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 2116828, "num_examples": 3979}, {"name": "validation", "num_bytes": 873012, "num_examples": 1641}], "download_size": 2648679, "dataset_size": 2989840}}
2024-02-06T05:56:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_optdigits" More Information needed
[ "# Dataset Card for \"metatree_optdigits\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_optdigits\"\n\nMore Information needed" ]
1ebdc758e77e4a623f66c3bc2097a84def5be56f
# Dataset Card for "metatree_pendigits" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_pendigits
[ "region:us" ]
2024-02-06T05:56:15+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1143744, "num_examples": 7728}, {"name": "validation", "num_bytes": 483072, "num_examples": 3264}], "download_size": 1332707, "dataset_size": 1626816}}
2024-02-06T05:56:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_pendigits" More Information needed
[ "# Dataset Card for \"metatree_pendigits\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_pendigits\"\n\nMore Information needed" ]
9233ec8a0f65b27eed27c6e2dbb718d46fc38e5c
# Dataset Card for "metatree_waveform_5000" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_waveform_5000
[ "region:us" ]
2024-02-06T05:56:30+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1198160, "num_examples": 3524}, {"name": "validation", "num_bytes": 501840, "num_examples": 1476}], "download_size": 330451, "dataset_size": 1700000}}
2024-02-06T05:56:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_waveform_5000" More Information needed
[ "# Dataset Card for \"metatree_waveform_5000\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_waveform_5000\"\n\nMore Information needed" ]
9d5d2f979585dfc8e2bae496bfd8160fab2936e7
# Dataset Card for "metatree_Hyperplane_10_1E_3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_Hyperplane_10_1E_3
[ "region:us" ]
2024-02-06T05:56:49+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 69953100, "num_examples": 699531}, {"name": "validation", "num_bytes": 30046900, "num_examples": 300469}], "download_size": 103582899, "dataset_size": 100000000}}
2024-02-06T05:56:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_Hyperplane_10_1E_3" More Information needed
[ "# Dataset Card for \"metatree_Hyperplane_10_1E_3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_Hyperplane_10_1E_3\"\n\nMore Information needed" ]
4934add5df69617cda22917a77115d294189218d
# Dataset Card for "metatree_Hyperplane_10_1E_4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_Hyperplane_10_1E_4
[ "region:us" ]
2024-02-06T05:57:13+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 70103500, "num_examples": 701035}, {"name": "validation", "num_bytes": 29896500, "num_examples": 298965}], "download_size": 103585686, "dataset_size": 100000000}}
2024-02-06T05:57:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_Hyperplane_10_1E_4" More Information needed
[ "# Dataset Card for \"metatree_Hyperplane_10_1E_4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_Hyperplane_10_1E_4\"\n\nMore Information needed" ]
1b04c2227c63fdc4dff43d71f81fca4fb894c949
# Dataset Card for "metatree_pokerhand" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_pokerhand
[ "region:us" ]
2024-02-06T05:57:34+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "uint8"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 14510800, "num_examples": 580432}, {"name": "validation", "num_bytes": 6219225, "num_examples": 248769}], "download_size": 7116755, "dataset_size": 20730025}}
2024-02-06T05:57:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_pokerhand" More Information needed
[ "# Dataset Card for \"metatree_pokerhand\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_pokerhand\"\n\nMore Information needed" ]
c92e2229fcd45fd10a1f73a3bc8ffef464356253
# Dataset Card for "metatree_RandomRBF_0_0" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_RandomRBF_0_0
[ "region:us" ]
2024-02-06T05:57:53+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 69979900, "num_examples": 699799}, {"name": "validation", "num_bytes": 30020100, "num_examples": 300201}], "download_size": 103911586, "dataset_size": 100000000}}
2024-02-06T05:58:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_RandomRBF_0_0" More Information needed
[ "# Dataset Card for \"metatree_RandomRBF_0_0\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_RandomRBF_0_0\"\n\nMore Information needed" ]
937f68c7251a56675d8804b66fa277fb813bacf8
# Dataset Card for "metatree_RandomRBF_10_1E_3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_RandomRBF_10_1E_3
[ "region:us" ]
2024-02-06T05:58:15+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 69947800, "num_examples": 699478}, {"name": "validation", "num_bytes": 30052200, "num_examples": 300522}], "download_size": 103914523, "dataset_size": 100000000}}
2024-02-06T05:58:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_RandomRBF_10_1E_3" More Information needed
[ "# Dataset Card for \"metatree_RandomRBF_10_1E_3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_RandomRBF_10_1E_3\"\n\nMore Information needed" ]
d4185ecd7f2c00440afdeb2728e41f65cbb54609
# Dataset Card for "metatree_RandomRBF_50_1E_3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_RandomRBF_50_1E_3
[ "region:us" ]
2024-02-06T05:58:37+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 69928800, "num_examples": 699288}, {"name": "validation", "num_bytes": 30071200, "num_examples": 300712}], "download_size": 103917487, "dataset_size": 100000000}}
2024-02-06T05:58:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_RandomRBF_50_1E_3" More Information needed
[ "# Dataset Card for \"metatree_RandomRBF_50_1E_3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_RandomRBF_50_1E_3\"\n\nMore Information needed" ]
028a8cf0166c7b18a5b30565c010bc68cc599301
# Dataset Card for "metatree_RandomRBF_10_1E_4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_RandomRBF_10_1E_4
[ "region:us" ]
2024-02-06T05:58:58+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 70077600, "num_examples": 700776}, {"name": "validation", "num_bytes": 29922400, "num_examples": 299224}], "download_size": 103910649, "dataset_size": 100000000}}
2024-02-06T05:59:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_RandomRBF_10_1E_4" More Information needed
[ "# Dataset Card for \"metatree_RandomRBF_10_1E_4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_RandomRBF_10_1E_4\"\n\nMore Information needed" ]
5b993fc5ec8c0b89284bd83adf70c7c740b01845
# Dataset Card for "metatree_RandomRBF_50_1E_4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_RandomRBF_50_1E_4
[ "region:us" ]
2024-02-06T05:59:21+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 69996400, "num_examples": 699964}, {"name": "validation", "num_bytes": 30003600, "num_examples": 300036}], "download_size": 103915009, "dataset_size": 100000000}}
2024-02-06T05:59:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_RandomRBF_50_1E_4" More Information needed
[ "# Dataset Card for \"metatree_RandomRBF_50_1E_4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_RandomRBF_50_1E_4\"\n\nMore Information needed" ]
eaa5719ac53c9e7b4a6f87e2155c7618cb24d727
# Dataset Card for "metatree_SEA_50_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_SEA_50_
[ "region:us" ]
2024-02-06T05:59:42+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 30798724, "num_examples": 699971}, {"name": "validation", "num_bytes": 13201276, "num_examples": 300029}], "download_size": 34891988, "dataset_size": 44000000}}
2024-02-06T05:59:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_SEA_50_" More Information needed
[ "# Dataset Card for \"metatree_SEA_50_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_SEA_50_\"\n\nMore Information needed" ]
a795329ee238fc3cdad0244a0070f06dad4b3fc5
# Dataset Card for "metatree_SEA_50000_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_SEA_50000_
[ "region:us" ]
2024-02-06T06:00:01+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 30819844, "num_examples": 700451}, {"name": "validation", "num_bytes": 13180156, "num_examples": 299549}], "download_size": 34891458, "dataset_size": 44000000}}
2024-02-06T06:00:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_SEA_50000_" More Information needed
[ "# Dataset Card for \"metatree_SEA_50000_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_SEA_50000_\"\n\nMore Information needed" ]
e6249c4aed7fd92ab4a49a0c5359a4c1fda17662
# Dataset Card for "metatree_satimage" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_satimage
[ "region:us" ]
2024-02-06T06:00:18+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1383228, "num_examples": 4491}, {"name": "validation", "num_bytes": 597212, "num_examples": 1939}], "download_size": 530639, "dataset_size": 1980440}}
2024-02-06T06:00:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_satimage" More Information needed
[ "# Dataset Card for \"metatree_satimage\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_satimage\"\n\nMore Information needed" ]
970e083fc0dfa01d3b59b2cac80c7961f4c3e466
# Dataset Card for "metatree_BNG_labor_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_BNG_labor_
[ "region:us" ]
2024-02-06T06:00:35+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 58804032, "num_examples": 700048}, {"name": "validation", "num_bytes": 25195968, "num_examples": 299952}], "download_size": 40166890, "dataset_size": 84000000}}
2024-02-06T06:00:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_BNG_labor_" More Information needed
[ "# Dataset Card for \"metatree_BNG_labor_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_BNG_labor_\"\n\nMore Information needed" ]
b95388dd819fd7b29bdc7db79a2c658e6db1c58b
# Dataset Card for "metatree_BNG_breast_w_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_BNG_breast_w_
[ "region:us" ]
2024-02-06T06:00:51+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 2519880, "num_examples": 27390}, {"name": "validation", "num_bytes": 1101792, "num_examples": 11976}], "download_size": 1587654, "dataset_size": 3621672}}
2024-02-06T06:00:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_BNG_breast_w_" More Information needed
[ "# Dataset Card for \"metatree_BNG_breast_w_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_BNG_breast_w_\"\n\nMore Information needed" ]
435f5b70a95a5daa944acc488c20babe770baf04
# Dataset Card for "metatree_BNG_mfeat_karhunen_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_BNG_mfeat_karhunen_
[ "region:us" ]
2024-02-06T06:01:17+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 372280300, "num_examples": 699775}, {"name": "validation", "num_bytes": 159719700, "num_examples": 300225}], "download_size": 644764994, "dataset_size": 532000000}}
2024-02-06T06:01:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_BNG_mfeat_karhunen_" More Information needed
[ "# Dataset Card for \"metatree_BNG_mfeat_karhunen_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_BNG_mfeat_karhunen_\"\n\nMore Information needed" ]
527f67e0d008dbf1334fb4994cf2f339f45cdc81
# Dataset Card for "metatree_BNG_bridges_version1_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_BNG_bridges_version1_
[ "region:us" ]
2024-02-06T06:02:00+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 30808316, "num_examples": 700189}, {"name": "validation", "num_bytes": 13191684, "num_examples": 299811}], "download_size": 26801061, "dataset_size": 44000000}}
2024-02-06T06:02:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_BNG_bridges_version1_" More Information needed
[ "# Dataset Card for \"metatree_BNG_bridges_version1_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_BNG_bridges_version1_\"\n\nMore Information needed" ]
3b9f88f43192c06517c74c86e657bab638eaf092
# Dataset Card for "metatree_BNG_mfeat_zernike_" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/metatree_BNG_mfeat_zernike_
[ "region:us" ]
2024-02-06T06:02:26+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "X", "sequence": "float64"}, {"name": "y", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 277411860, "num_examples": 700535}, {"name": "validation", "num_bytes": 118588140, "num_examples": 299465}], "download_size": 476793911, "dataset_size": 396000000}}
2024-02-06T06:02:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for "metatree_BNG_mfeat_zernike_" More Information needed
[ "# Dataset Card for \"metatree_BNG_mfeat_zernike_\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"metatree_BNG_mfeat_zernike_\"\n\nMore Information needed" ]