Evaluation

ID
7d54eb0c-b18d-4916-9d99-763e6b1d645f
Submission ID
3fe47aeb-7bf0-4b02-a6c1-f67b5125e116
Method ID
4c2cf1a4-c2cb-44d4-97cb-123c7d4716d0
Status
Succeeded
User
LucBuiltjes
Challenge
DRAGON
Phase
Synthetic
Algorithm
llm-extractinator deepseek-r1-14b (Image Version 4388574d-e8dc-47df-b340-a54584bd7f2e Model version c638a732-dc73-4a01-abcd-23f3d30085c5 )
Submission created
May 2, 2025, 10:27 a.m.
Result created
May 2, 2025, 10:27 a.m.
Position on leaderboard
7

Metrics

{
  "case": {
    "Task103_Example_mednli": {
      "Task103_Example_mednli-fold0": 0.9875518672199171
    },
    "Task106_Example_sl_reg": {
      "Task106_Example_sl_reg-fold0": 0.8923057199616882
    },
    "Task107_Example_ml_reg": {
      "Task107_Example_ml_reg-fold0": 0.7536047375462696
    },
    "Task108_Example_sl_ner": {
      "Task108_Example_sl_ner-fold0": 0.3627221517230927
    },
    "Task109_Example_ml_ner": {
      "Task109_Example_ml_ner-fold0": 0.49192313675898397
    },
    "Task102_Example_sl_mc_clf": {
      "Task102_Example_sl_mc_clf-fold0": 0.9238288688587025
    },
    "Task105_Example_ml_mc_clf": {
      "Task105_Example_ml_mc_clf-fold0": 0.78228384552258
    },
    "Task101_Example_sl_bin_clf": {
      "Task101_Example_sl_bin_clf-fold0": 0.9886363636363636
    },
    "Task104_Example_ml_bin_clf": {
      "Task104_Example_ml_bin_clf-fold0": 0.7723311377617598
    }
  },
  "aggregates": {
    "overall": {
      "std": 0.0,
      "mean": 0.7727986476654841
    },
    "Task103_Example_mednli": {
      "std": 0.0,
      "mean": 0.9875518672199171
    },
    "Task106_Example_sl_reg": {
      "std": 0.0,
      "mean": 0.8923057199616882
    },
    "Task107_Example_ml_reg": {
      "std": 0.0,
      "mean": 0.7536047375462696
    },
    "Task108_Example_sl_ner": {
      "std": 0.0,
      "mean": 0.3627221517230927
    },
    "Task109_Example_ml_ner": {
      "std": 0.0,
      "mean": 0.49192313675898397
    },
    "Task102_Example_sl_mc_clf": {
      "std": 0.0,
      "mean": 0.9238288688587025
    },
    "Task105_Example_ml_mc_clf": {
      "std": 0.0,
      "mean": 0.78228384552258
    },
    "Task101_Example_sl_bin_clf": {
      "std": 0.0,
      "mean": 0.9886363636363636
    },
    "Task104_Example_ml_bin_clf": {
      "std": 0.0,
      "mean": 0.7723311377617598
    }
  }
}