Update app.py
app.py CHANGED
```diff
@@ -281,619 +281,618 @@ def download_all_data():
     # --------------------------------------------------------------------
     # This is your larger dataset, rank = 44..105
     benchmark_data = [
-    """
         {
-            rank: 1,
-            name: "sometimesanotion/Lamarck-14B-v0.7-rc4",
-            scores: {
-                average: 41.22,
-                IFEval: 72.11,
-                BBH: 49.85,
-                MATH: 36.86,
-                GPQA: 18.57,
-                MUSR: 21.07,
-                MMLU_PRO: 48.89,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 1,
+            "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
+            "scores": {
+                "average": 41.22,
+                "IFEval": 72.11,
+                "BBH": 49.85,
+                "MATH": 36.86,
+                "GPQA": 18.57,
+                "MUSR": 21.07,
+                "MMLU_PRO": 48.89,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
+            "known_config": None
         },
         {
-            rank: 2,
-            name: "arcee-ai/Virtuoso-Small-v2",
-            scores: {
-                average: 41.08,
-                IFEval: 82.73,
-                BBH: 50.95,
-                MATH: 38.22,
-                GPQA: 13.76,
-                MUSR: 14.28,
-                MMLU_PRO: 46.53,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 2,
+            "name": "arcee-ai/Virtuoso-Small-v2",
+            "scores": {
+                "average": 41.08,
+                "IFEval": 82.73,
+                "BBH": 50.95,
+                "MATH": 38.22,
+                "GPQA": 13.76,
+                "MUSR": 14.28,
+                "MMLU_PRO": 46.53,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
-            known_config: None
+            "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
+            "known_config": None
         },
         {
-            rank: 3,
-            name: "sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
-            scores: {
-                average: 41.08,
-                IFEval: 61.73,
-                BBH: 49.87,
-                MATH: 42.30,
-                GPQA: 19.24,
-                MUSR: 24.78,
-                MMLU_PRO: 48.54,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 3,
+            "name": "sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
+            "scores": {
+                "average": 41.08,
+                "IFEval": 61.73,
+                "BBH": 49.87,
+                "MATH": 42.30,
+                "GPQA": 19.24,
+                "MUSR": 24.78,
+                "MMLU_PRO": 48.54,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v12-Prose-DS",
+            "known_config": None
         },
         {
-            rank: 4,
-            name: "bunnycore/Phi-4-Model-Stock-v4",
-            scores: {
-                average: 41.03,
-                IFEval: 71.10,
-                BBH: 55.90,
-                MATH: 37.16,
-                GPQA: 15.88,
-                MUSR: 17.30,
-                MMLU_PRO: 48.82,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 4,
+            "name": "bunnycore/Phi-4-Model-Stock-v4",
+            "scores": {
+                "average": 41.03,
+                "IFEval": 71.10,
+                "BBH": 55.90,
+                "MATH": 37.16,
+                "GPQA": 15.88,
+                "MUSR": 17.30,
+                "MMLU_PRO": 48.82,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-Model-Stock-v4",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock-v4",
+            "known_config": None
         },
         {
-            rank: 5,
-            name: "sthenno/tempesthenno-nuslerp-0124",
-            scores: {
-                average: 40.97,
-                IFEval: 70.04,
-                BBH: 49.28,
-                MATH: 39.27,
-                GPQA: 18.68,
-                MUSR: 20.21,
-                MMLU_PRO: 48.36,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 5,
+            "name": "sthenno/tempesthenno-nuslerp-0124",
+            "scores": {
+                "average": 40.97,
+                "IFEval": 70.04,
+                "BBH": 49.28,
+                "MATH": 39.27,
+                "GPQA": 18.68,
+                "MUSR": 20.21,
+                "MMLU_PRO": 48.36,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sthenno/tempesthenno-nuslerp-0124",
-            known_config: None
+            "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-0124",
+            "known_config": None
         },
         {
-            rank: 6,
-            name: "bunnycore/Phi-4-RR-Shoup",
-            scores: {
-                average: 40.95,
-                IFEval: 65.87,
-                BBH: 56.11,
-                MATH: 47.96,
-                GPQA: 11.63,
-                MUSR: 14.94,
-                MMLU_PRO: 49.21,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 6,
+            "name": "bunnycore/Phi-4-RR-Shoup",
+            "scores": {
+                "average": 40.95,
+                "IFEval": 65.87,
+                "BBH": 56.11,
+                "MATH": 47.96,
+                "GPQA": 11.63,
+                "MUSR": 14.94,
+                "MMLU_PRO": 49.21,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-RR-Shoup",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-RR-Shoup",
+            "known_config": None
         },
         {
-            rank: 7,
-            name: "sometimesanotion/Qwenvergence-14B-v10",
-            scores: {
-                average: 40.86,
-                IFEval: 67.57,
-                BBH: 46.75,
-                MATH: 44.18,
-                GPQA: 17.23,
-                MUSR: 22.33,
-                MMLU_PRO: 47.10,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 7,
+            "name": "sometimesanotion/Qwenvergence-14B-v10",
+            "scores": {
+                "average": 40.86,
+                "IFEval": 67.57,
+                "BBH": 46.75,
+                "MATH": 44.18,
+                "GPQA": 17.23,
+                "MUSR": 22.33,
+                "MMLU_PRO": 47.10,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
+            "known_config": None
         },
         {
-            rank: 8,
-            name: "bunnycore/Phi-4-RStock-v0.1",
-            scores: {
-                average: 40.84,
-                IFEval: 70.03,
-                BBH: 55.98,
-                MATH: 38.07,
-                GPQA: 15.32,
-                MUSR: 16.73,
-                MMLU_PRO: 48.90,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 8,
+            "name": "bunnycore/Phi-4-RStock-v0.1",
+            "scores": {
+                "average": 40.84,
+                "IFEval": 70.03,
+                "BBH": 55.98,
+                "MATH": 38.07,
+                "GPQA": 15.32,
+                "MUSR": 16.73,
+                "MMLU_PRO": 48.90,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-RStock-v0.1",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-RStock-v0.1",
+            "known_config": None
         },
         {
-            rank: 9,
-            name: "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
-            scores: {
-                average: 40.74,
-                IFEval: 73.23,
-                BBH: 49.57,
-                MATH: 36.93,
-                GPQA: 17.23,
-                MUSR: 19.30,
-                MMLU_PRO: 48.19,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 9,
+            "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
+            "scores": {
+                "average": 40.74,
+                "IFEval": 73.23,
+                "BBH": 49.57,
+                "MATH": 36.93,
+                "GPQA": 17.23,
+                "MUSR": 19.30,
+                "MMLU_PRO": 48.19,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
-            known_config: None
+            "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
+            "known_config": None
         },
         {
-            rank: 10,
-            name: "bunnycore/Phi-4-ReasoningRP",
-            scores: {
-                average: 40.73,
-                IFEval: 67.36,
-                BBH: 55.88,
-                MATH: 44.34,
-                GPQA: 12.53,
-                MUSR: 15.14,
-                MMLU_PRO: 49.12,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 10,
+            "name": "bunnycore/Phi-4-ReasoningRP",
+            "scores": {
+                "average": 40.73,
+                "IFEval": 67.36,
+                "BBH": 55.88,
+                "MATH": 44.34,
+                "GPQA": 12.53,
+                "MUSR": 15.14,
+                "MMLU_PRO": 49.12,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
+            "known_config": None
         },
         {
-            rank: 11,
-            name: "sometimesanotion/Lamarck-14B-v0.7-rc1",
-            scores: {
-                average: 40.69,
-                IFEval: 73.05,
-                BBH: 49.51,
-                MATH: 35.80,
-                GPQA: 18.57,
-                MUSR: 18.13,
-                MMLU_PRO: 49.06,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 11,
+            "name": "sometimesanotion/Lamarck-14B-v0.7-rc1",
+            "scores": {
+                "average": 40.69,
+                "IFEval": 73.05,
+                "BBH": 49.51,
+                "MATH": 35.80,
+                "GPQA": 18.57,
+                "MUSR": 18.13,
+                "MMLU_PRO": 49.06,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc1",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc1",
+            "known_config": None
         },
         {
-            rank: 12,
-            name: "jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
-            scores: {
-                average: 40.62,
-                IFEval: 72.41,
-                BBH: 49.58,
-                MATH: 35.73,
-                GPQA: 17.79,
-                MUSR: 19.66,
-                MMLU_PRO: 48.54,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 12,
+            "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
+            "scores": {
+                "average": 40.62,
+                "IFEval": 72.41,
+                "BBH": 49.58,
+                "MATH": 35.73,
+                "GPQA": 17.79,
+                "MUSR": 19.66,
+                "MMLU_PRO": 48.54,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
-            known_config: None
+            "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b2",
+            "known_config": None
         },
         {
-            rank: 13,
-            name: "sometimesanotion/Qwenvergence-14B-v11",
-            scores: {
-                average: 40.56,
-                IFEval: 71.92,
-                BBH: 47.55,
-                MATH: 40.71,
-                GPQA: 16.33,
-                MUSR: 18.76,
-                MMLU_PRO: 48.08,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 13,
+            "name": "sometimesanotion/Qwenvergence-14B-v11",
+            "scores": {
+                "average": 40.56,
+                "IFEval": 71.92,
+                "BBH": 47.55,
+                "MATH": 40.71,
+                "GPQA": 16.33,
+                "MUSR": 18.76,
+                "MMLU_PRO": 48.08,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
+            "known_config": None
         },
         {
-            rank: 14,
-            name: "sthenno/tempesthenno-ppo-ckpt40",
-            scores: {
-                average: 40.55,
-                IFEval: 79.23,
-                BBH: 50.57,
-                MATH: 34.21,
-                GPQA: 17.00,
-                MUSR: 14.56,
-                MMLU_PRO: 47.69,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 14,
+            "name": "sthenno/tempesthenno-ppo-ckpt40",
+            "scores": {
+                "average": 40.55,
+                "IFEval": 79.23,
+                "BBH": 50.57,
+                "MATH": 34.21,
+                "GPQA": 17.00,
+                "MUSR": 14.56,
+                "MMLU_PRO": 47.69,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
-            known_config: None
+            "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
+            "known_config": None
         },
         {
-            rank: 15,
-            name: "tensopolis/virtuoso-small-v2-tensopolis-v1",
-            scores: {
-                average: 40.38,
-                IFEval: 80.94,
-                BBH: 50.46,
-                MATH: 35.88,
-                GPQA: 13.65,
-                MUSR: 15.82,
-                MMLU_PRO: 45.52,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 15,
+            "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
+            "scores": {
+                "average": 40.38,
+                "IFEval": 80.94,
+                "BBH": 50.46,
+                "MATH": 35.88,
+                "GPQA": 13.65,
+                "MUSR": 15.82,
+                "MMLU_PRO": 45.52,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
-            known_config: None
+            "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
+            "known_config": None
         },
         {
-            rank: 16,
-            name: "sometimesanotion/Lamarck-14B-v0.6",
-            scores: {
-                average: 40.37,
-                IFEval: 69.73,
-                BBH: 49.30,
-                MATH: 35.65,
-                GPQA: 18.57,
-                MUSR: 20.12,
-                MMLU_PRO: 48.89,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 16,
+            "name": "sometimesanotion/Lamarck-14B-v0.6",
+            "scores": {
+                "average": 40.37,
+                "IFEval": 69.73,
+                "BBH": 49.30,
+                "MATH": 35.65,
+                "GPQA": 18.57,
+                "MUSR": 20.12,
+                "MMLU_PRO": 48.89,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6",
+            "known_config": None
         },
         {
-            rank: 17,
-            name: "sthenno-com/miscii-14b-0130",
-            scores: {
-                average: 40.29,
-                IFEval: 66.47,
-                BBH: 49.84,
-                MATH: 38.44,
-                GPQA: 17.56,
-                MUSR: 20.96,
-                MMLU_PRO: 48.48,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 17,
+            "name": "sthenno-com/miscii-14b-0130",
+            "scores": {
+                "average": 40.29,
+                "IFEval": 66.47,
+                "BBH": 49.84,
+                "MATH": 38.44,
+                "GPQA": 17.56,
+                "MUSR": 20.96,
+                "MMLU_PRO": 48.48,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sthenno-com/miscii-14b-0130",
-            known_config: None
+            "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-0130",
+            "known_config": None
         },
         {
-            rank: 18,
-            name: "hotmailuser/QwenSlerp2-14B",
-            scores: {
-                average: 40.21,
-                IFEval: 70.37,
-                BBH: 49.68,
-                MATH: 35.73,
-                GPQA: 17.45,
-                MUSR: 19.35,
-                MMLU_PRO: 48.66,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 18,
+            "name": "hotmailuser/QwenSlerp2-14B",
+            "scores": {
+                "average": 40.21,
+                "IFEval": 70.37,
+                "BBH": 49.68,
+                "MATH": 35.73,
+                "GPQA": 17.45,
+                "MUSR": 19.35,
+                "MMLU_PRO": 48.66,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
-            known_config: None
+            "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
+            "known_config": None
         },
         {
-            rank: 19,
-            name: "Sakalti/ultiima-14B-v0.2",
-            scores: {
-                average: 40.18,
-                IFEval: 70.70,
-                BBH: 49.51,
-                MATH: 35.27,
-                GPQA: 17.67,
-                MUSR: 19.19,
-                MMLU_PRO: 48.75,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 19,
+            "name": "Sakalti/ultiima-14B-v0.2",
+            "scores": {
+                "average": 40.18,
+                "IFEval": 70.70,
+                "BBH": 49.51,
+                "MATH": 35.27,
+                "GPQA": 17.67,
+                "MUSR": 19.19,
+                "MMLU_PRO": 48.75,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
-            known_config: None
+            "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
+            "known_config": None
         },
         {
-            rank: 20,
-            name: "pankajmathur/orca_mini_phi-4",
-            scores: {
-                average: 40.16,
-                IFEval: 77.81,
-                BBH: 54.63,
-                MATH: 26.44,
-                GPQA: 16.55,
-                MUSR: 18.25,
-                MMLU_PRO: 47.28,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 20,
+            "name": "pankajmathur/orca_mini_phi-4",
+            "scores": {
+                "average": 40.16,
+                "IFEval": 77.81,
+                "BBH": 54.63,
+                "MATH": 26.44,
+                "GPQA": 16.55,
+                "MUSR": 18.25,
+                "MMLU_PRO": 47.28,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/pankajmathur/orca_mini_phi-4",
-            known_config: None
+            "hf_url": "https://huggingface.co/pankajmathur/orca_mini_phi-4",
+            "known_config": None
         },
         {
-            rank: 21,
-            name: "pankajmathur/orca_mini_v9_2_14B",
-            scores: {
-                average: 40.16,
-                IFEval: 77.81,
-                BBH: 54.63,
-                MATH: 26.44,
-                GPQA: 16.55,
-                MUSR: 18.25,
-                MMLU_PRO: 47.28,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 21,
+            "name": "pankajmathur/orca_mini_v9_2_14B",
+            "scores": {
+                "average": 40.16,
+                "IFEval": 77.81,
+                "BBH": 54.63,
+                "MATH": 26.44,
+                "GPQA": 16.55,
+                "MUSR": 18.25,
+                "MMLU_PRO": 47.28,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
-            known_config: None
+            "hf_url": "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
+            "known_config": None
         },
         {
-            rank: 22,
-            name: "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
-            scores: {
-                average: 40.10,
-                IFEval: 72.57,
-                BBH: 48.58,
-                MATH: 34.44,
-                GPQA: 17.34,
-                MUSR: 19.39,
-                MMLU_PRO: 48.26,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14B",
-                Chat_Template: "No"
+            "rank": 22,
+            "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
+            "scores": {
+                "average": 40.10,
+                "IFEval": 72.57,
+                "BBH": 48.58,
+                "MATH": 34.44,
+                "GPQA": 17.34,
+                "MUSR": 19.39,
+                "MMLU_PRO": 48.26,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3",
+            "known_config": None
         },
         {
-            rank: 23,
-            name: "sthenno-com/miscii-14b-1225",
-            scores: {
-                average: 40.08,
-                IFEval: 78.78,
-                BBH: 50.91,
-                MATH: 31.57,
-                GPQA: 17.00,
-                MUSR: 14.77,
-                MMLU_PRO: 47.46,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 23,
+            "name": "sthenno-com/miscii-14b-1225",
+            "scores": {
+                "average": 40.08,
+                "IFEval": 78.78,
+                "BBH": 50.91,
+                "MATH": 31.57,
+                "GPQA": 17.00,
+                "MUSR": 14.77,
+                "MMLU_PRO": 47.46,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/sthenno-com/miscii-14b-1225",
-            known_config: None
+            "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
+            "known_config": None
         },
         {
-            rank: 24,
-            name: "bunnycore/Phi-4-Model-Stock",
-            scores: {
-                average: 40.06,
-                IFEval: 68.79,
-                BBH: 55.32,
-                MATH: 38.60,
-                GPQA: 13.98,
-                MUSR: 15.12,
-                MMLU_PRO: 48.54,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 24,
+            "name": "bunnycore/Phi-4-Model-Stock",
+            "scores": {
+                "average": 40.06,
+                "IFEval": 68.79,
+                "BBH": 55.32,
+                "MATH": 38.60,
+                "GPQA": 13.98,
+                "MUSR": 15.12,
+                "MMLU_PRO": 48.54,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
+            "known_config": None
         },
         {
-            rank: 25,
-            name: "djuna/Q2.5-Veltha-14B-0.5",
-            scores: {
-                average: 39.96,
-                IFEval: 77.96,
-                BBH: 50.32,
-                MATH: 33.84,
-                GPQA: 15.77,
-                MUSR: 14.17,
-                MMLU_PRO: 47.72,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 25,
+            "name": "djuna/Q2.5-Veltha-14B-0.5",
+            "scores": {
+                "average": 39.96,
+                "IFEval": 77.96,
+                "BBH": 50.32,
+                "MATH": 33.84,
+                "GPQA": 15.77,
+                "MUSR": 14.17,
+                "MMLU_PRO": 47.72,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
-            known_config: None
+            "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
+            "known_config": None
         },
         {
-            rank: 26,
-            name: "ehristoforu/fp4-14b-v1-fix",
-            scores: {
-                average: 39.96,
-                IFEval: 67.42,
-                BBH: 54.33,
-                MATH: 39.58,
-                GPQA: 13.87,
-                MUSR: 16.18,
-                MMLU_PRO: 48.37,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 26,
+            "name": "ehristoforu/fp4-14b-v1-fix",
+            "scores": {
+                "average": 39.96,
+                "IFEval": 67.42,
+                "BBH": 54.33,
+                "MATH": 39.58,
+                "GPQA": 13.87,
+                "MUSR": 16.18,
+                "MMLU_PRO": 48.37,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
-            known_config: None
+            "hf_url": "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
+            "known_config": None
         },
         {
-            rank: 27,
-            name: "sthenno/tempesthenno-nuslerp-001",
-            scores: {
-                average: 39.94,
-                IFEval: 79.26,
-                BBH: 51.04,
-                MATH: 31.72,
-                GPQA: 16.44,
-                MUSR: 13.88,
-                MMLU_PRO: 47.30,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "Yes"
+            "rank": 27,
+            "name": "sthenno/tempesthenno-nuslerp-001",
+            "scores": {
+                "average": 39.94,
+                "IFEval": 79.26,
+                "BBH": 51.04,
+                "MATH": 31.72,
+                "GPQA": 16.44,
+                "MUSR": 13.88,
+                "MMLU_PRO": 47.30,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
-            known_config: None
+            "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
+            "known_config": None
         },
         {
-            rank: 28,
-            name: "bunnycore/Phi-4-Stock-Ex",
-            scores: {
-                average: 39.93,
-                IFEval: 65.75,
-                BBH: 55.20,
-                MATH: 39.12,
-                GPQA: 13.42,
-                MUSR: 17.46,
-                MMLU_PRO: 48.61,
-                Architecture: "LlamaForCausalLM",
-                Parameters: "14.66B",
-                Chat_Template: "Yes"
+            "rank": 28,
+            "name": "bunnycore/Phi-4-Stock-Ex",
+            "scores": {
+                "average": 39.93,
+                "IFEval": 65.75,
+                "BBH": 55.20,
+                "MATH": 39.12,
+                "GPQA": 13.42,
+                "MUSR": 17.46,
+                "MMLU_PRO": 48.61,
+                "Architecture": "LlamaForCausalLM",
+                "Parameters": "14.66B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
-            known_config: None
+            "hf_url": "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
+            "known_config": None
         },
         {
-            rank: 29,
-            name: "hotmailuser/QwenSlerp-14B",
-            scores: {
-                average: 39.87,
-                IFEval: 70.25,
-                BBH: 49.42,
-                MATH: 35.50,
-                GPQA: 18.34,
-                MUSR: 16.83,
-                MMLU_PRO: 48.89,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 29,
+            "name": "hotmailuser/QwenSlerp-14B",
+            "scores": {
+                "average": 39.87,
+                "IFEval": 70.25,
+                "BBH": 49.42,
+                "MATH": 35.50,
+                "GPQA": 18.34,
+                "MUSR": 16.83,
+                "MMLU_PRO": 48.89,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/hotmailuser/QwenSlerp-14B",
-            known_config: None
+            "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp-14B",
+            "known_config": None
         },
         {
-            rank: 30,
-            name: "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
-            scores: {
-                average: 39.81,
-                IFEval: 71.62,
-                BBH: 48.76,
-                MATH: 33.99,
-                GPQA: 17.34,
-                MUSR: 19.23,
-                MMLU_PRO: 47.95,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14B",
-                Chat_Template: "No"
+            "rank": 30,
+            "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
+            "scores": {
+                "average": 39.81,
+                "IFEval": 71.62,
+                "BBH": 48.76,
+                "MATH": 33.99,
+                "GPQA": 17.34,
+                "MUSR": 19.23,
+                "MMLU_PRO": 47.95,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock",
+            "known_config": None
         },
         {
-            rank: 31,
-            name: "tensopolis/virtuoso-small-tensopolis-v1",
-            scores: {
-                average: 39.69,
-                IFEval: 79.50,
-                BBH: 50.70,
-                MATH: 36.03,
-                GPQA: 10.85,
-                MUSR: 14.70,
-                MMLU_PRO: 46.36,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.77B",
-                Chat_Template: "Yes"
+            "rank": 31,
+            "name": "tensopolis/virtuoso-small-tensopolis-v1",
+            "scores": {
+                "average": 39.69,
+                "IFEval": 79.50,
+                "BBH": 50.70,
+                "MATH": 36.03,
+                "GPQA": 10.85,
+                "MUSR": 14.70,
+                "MMLU_PRO": 46.36,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.77B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v1",
-            known_config: None
+            "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v1",
+            "known_config": None
         },
         {
-            rank: 32,
-            name: "sometimesanotion/Lamarck-14B-v0.6-model_stock",
-            scores: {
-                average: 39.58,
-                IFEval: 67.90,
-                BBH: 46.49,
-                MATH: 35.88,
-                GPQA: 17.90,
-                MUSR: 22.68,
-                MMLU_PRO: 46.64,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14B",
-                Chat_Template: "No"
+            "rank": 32,
+            "name": "sometimesanotion/Lamarck-14B-v0.6-model_stock",
+            "scores": {
+                "average": 39.58,
+                "IFEval": 67.90,
+                "BBH": 46.49,
+                "MATH": 35.88,
+                "GPQA": 17.90,
+                "MUSR": 22.68,
+                "MMLU_PRO": 46.64,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
-            known_config: None
+            "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
+            "known_config": None
         },
         {
-            rank: 33,
-            name: "tensopolis/virtuoso-small-tensopolis-v2",
-            scores: {
-                average: 39.53,
-                IFEval: 80.20,
-                BBH: 50.23,
-                MATH: 35.27,
-                GPQA: 10.51,
-                MUSR: 14.84,
-                MMLU_PRO: 46.15,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.77B",
-                Chat_Template: "Yes"
+            "rank": 33,
+            "name": "tensopolis/virtuoso-small-tensopolis-v2",
+            "scores": {
+                "average": 39.53,
+                "IFEval": 80.20,
+                "BBH": 50.23,
+                "MATH": 35.27,
+                "GPQA": 10.51,
+                "MUSR": 14.84,
+                "MMLU_PRO": 46.15,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.77B",
+                "Chat_Template": "Yes"
             },
-            hf_url: "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
-            known_config: None
+            "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
+            "known_config": None
         },
         {
-            rank: 34,
-            name: "Sakalti/ultiima-14B-v0.3",
-            scores: {
-                average: 39.53,
-                IFEval: 70.40,
-                BBH: 48.45,
-                MATH: 34.52,
-                GPQA: 16.89,
-                MUSR: 18.73,
-                MMLU_PRO: 48.18,
-                Architecture: "Qwen2ForCausalLM",
-                Parameters: "14.766B",
-                Chat_Template: "No"
+            "rank": 34,
+            "name": "Sakalti/ultiima-14B-v0.3",
+            "scores": {
+                "average": 39.53,
+                "IFEval": 70.40,
+                "BBH": 48.45,
+                "MATH": 34.52,
+                "GPQA": 16.89,
+                "MUSR": 18.73,
+                "MMLU_PRO": 48.18,
+                "Architecture": "Qwen2ForCausalLM",
+                "Parameters": "14.766B",
+                "Chat_Template": "No"
             },
-            hf_url: "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
-            known_config: None
-        }
+            "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
+            "known_config": None
+        }
     ]
```
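With the keys quoted, `benchmark_data` is a plain list of Python dicts, so the hand-entered `average` fields can be cross-checked in a few lines. A minimal sketch, not part of app.py (the `check_averages` helper and the 0.02 tolerance are illustrative), assuming `average` is meant to be the mean of the six benchmark scores:

```python
# Recompute each entry's mean benchmark score and flag any entry whose
# hand-entered "average" disagrees beyond a small rounding tolerance.
BENCH_KEYS = ("IFEval", "BBH", "MATH", "GPQA", "MUSR", "MMLU_PRO")

def check_averages(data, tolerance=0.02):
    for entry in data:
        scores = entry["scores"]
        mean = sum(scores[k] for k in BENCH_KEYS) / len(BENCH_KEYS)
        if abs(mean - scores["average"]) > tolerance:
            print(f"{entry['name']}: listed {scores['average']}, recomputed {mean:.2f}")

check_averages(benchmark_data)
```

For rank 1, (72.11 + 49.85 + 36.86 + 18.57 + 21.07 + 48.89) / 6 = 41.225, which rounds to the listed 41.22, so the field does appear to be the plain mean of the six scores.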
The hunk ends on unchanged trailing context: the signature of the next function in app.py, `def snippet_scrape_model_page(url):`.
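The body of `snippet_scrape_model_page` lies outside this diff. As a hedged sketch of what a function with that signature might do, assuming it fetches a model page over HTTP and extracts basic metadata (`requests` and `BeautifulSoup` are assumptions here, not confirmed by the diff):

```python
# Hypothetical sketch only -- the real snippet_scrape_model_page body is
# not shown in this commit. Fetches the page and returns minimal metadata.
import requests
from bs4 import BeautifulSoup

def snippet_scrape_model_page(url):
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()  # surface HTTP errors instead of parsing an error page
    soup = BeautifulSoup(resp.text, "html.parser")
    title = soup.find("title")
    return {
        "url": url,
        "title": title.get_text(strip=True) if title else None,
    }
```

Such a helper would pair naturally with the dataset above, e.g. `snippet_scrape_model_page(benchmark_data[0]["hf_url"])`.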