Evaluation

ID: ee693b8f-a1e9-438a-baca-e1c1cd0caf12
Submission ID: 1399581e-8f36-45d3-8c48-84fda4052e5a
Method ID: b1187f72-4ce4-4eed-9b5a-ffb9d76847b0
Status: Succeeded
User: joeran.bosma
Challenge: DRAGON
Phase: Synthetic
Algorithm: DRAGON RoBERTa Base Mixed-domain (Image Version cbca1b17-1266-4dc5-a00b-a77bbd4fe91f)
Submission created: May 9, 2024, 8:15 a.m.
Result created: May 9, 2024, 8:15 a.m.
Position on leaderboard: 7

Metrics

{
  "case": {
    "Task103_Example_mednli": {
      "Task103_Example_mednli-fold0": 0.5020242914979758
    },
    "Task106_Example_sl_reg": {
      "Task106_Example_sl_reg-fold0": 0.7301702458026293
    },
    "Task107_Example_ml_reg": {
      "Task107_Example_ml_reg-fold0": 0.7686782063738717
    },
    "Task108_Example_sl_ner": {
      "Task108_Example_sl_ner-fold0": 0.28178493619670086
    },
    "Task109_Example_ml_ner": {
      "Task109_Example_ml_ner-fold0": 0.7487301760060378
    },
    "Task102_Example_sl_mc_clf": {
      "Task102_Example_sl_mc_clf-fold0": 0.6424970208085068
    },
    "Task105_Example_ml_mc_clf": {
      "Task105_Example_ml_mc_clf-fold0": 0.8197453175777388
    },
    "Task101_Example_sl_bin_clf": {
      "Task101_Example_sl_bin_clf-fold0": 0.6994134897360703
    },
    "Task104_Example_ml_bin_clf": {
      "Task104_Example_ml_bin_clf-fold0": 0.9186991238426644
    }
  },
  "aggregates": {
    "overall": {
      "std": 0.0,
      "mean": 0.6790825342046883
    },
    "Task103_Example_mednli": {
      "std": 0.0,
      "mean": 0.5020242914979758
    },
    "Task106_Example_sl_reg": {
      "std": 0.0,
      "mean": 0.7301702458026293
    },
    "Task107_Example_ml_reg": {
      "std": 0.0,
      "mean": 0.7686782063738717
    },
    "Task108_Example_sl_ner": {
      "std": 0.0,
      "mean": 0.28178493619670086
    },
    "Task109_Example_ml_ner": {
      "std": 0.0,
      "mean": 0.7487301760060378
    },
    "Task102_Example_sl_mc_clf": {
      "std": 0.0,
      "mean": 0.6424970208085068
    },
    "Task105_Example_ml_mc_clf": {
      "std": 0.0,
      "mean": 0.8197453175777388
    },
    "Task101_Example_sl_bin_clf": {
      "std": 0.0,
      "mean": 0.6994134897360703
    },
    "Task104_Example_ml_bin_clf": {
      "std": 0.0,
      "mean": 0.9186991238426644
    }
  }
}
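
The aggregate values above follow directly from the per-case entries: each task's mean is the average of its fold scores (only fold0 is present here, which is why every std is 0.0), and the overall mean is the unweighted average of the nine task means (0.6790825342046883). Below is a minimal sketch of that aggregation, assuming the metrics JSON above is saved as metrics.json (the filename and the use of a population std are assumptions, not part of this page).

```python
import json
import statistics

# Load the metrics JSON shown above; the filename "metrics.json" is an assumption.
with open("metrics.json") as f:
    metrics = json.load(f)

# Per-task aggregate: mean and (population) std over that task's fold scores.
# With a single fold (fold0) per task, every std comes out as 0.0.
aggregates = {}
for task, folds in metrics["case"].items():
    scores = list(folds.values())
    aggregates[task] = {
        "mean": statistics.mean(scores),
        "std": statistics.pstdev(scores),
    }

# Overall score: unweighted average of the nine per-task means.
overall_mean = statistics.mean(a["mean"] for a in aggregates.values())
print(f"overall mean: {overall_mean:.16f}")  # ~0.6790825342046883
```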