From 7d837cc1427c1bb1e9e057e758615bd35e8e0ebd Mon Sep 17 00:00:00 2001
From: Zdenek Kasner
Date: Thu, 1 Aug 2024 16:39:03 +0200
Subject: [PATCH 1/6] Move configuration of annotator instructions to UI and campaign metadata

---
 factgenie/config.yml | 2 +-
 factgenie/config/crowdsourcing/example.yaml | 22 +++++
 factgenie/main.py | 9 +---
 factgenie/static/css/custom.css | 5 +++
 factgenie/static/js/factgenie.js | 12 +++++
 .../templates/campaigns/annotate_default.html | 45 +++----------------
 factgenie/templates/crowdsourcing_new.html | 36 ++++++++++++++-
 factgenie/utils.py | 27 +++++++++++
 setup.py | 1 +
 9 files changed, 110 insertions(+), 49 deletions(-)

diff --git a/factgenie/config.yml b/factgenie/config.yml
index 62eecddc..bc2cef0c 100644
--- a/factgenie/config.yml
+++ b/factgenie/config.yml
@@ -1,5 +1,5 @@
 ---
-debug: false
+debug: true
 host_prefix: ""
 logging_level: INFO
 login:
diff --git a/factgenie/config/crowdsourcing/example.yaml b/factgenie/config/crowdsourcing/example.yaml
index db275d6c..c4bc0e3a 100644
--- a/factgenie/config/crowdsourcing/example.yaml
+++ b/factgenie/config/crowdsourcing/example.yaml
@@ -15,3 +15,25 @@ sort_order: dataset-level
 # time after which an unfinished example can be re-assigned to a new annotator (minutes)
 idle_time: 120
 completion_code: "ABCDE"
+annotator_instructions: |
+  In this task, you will annotate outputs of an automatic text generation system. For each example, you will see **data** on the left side and the corresponding generated **text** on the right side. Your task is to **annotate errors** in the text with respect to the data.
+
+  There are four types of errors that you can mark in the generated text:
+
+  1. Incorrect fact: The fact in the text contradicts the data.
+  2. Not checkable: The fact in the text cannot be checked given the data.
+  3. Misleading: The fact in the text is misleading in the given context.
+  4. Other: The text is problematic for another reason, e.g. grammatically or stylistically incorrect, irrelevant, or repetitive.
+
+  You can annotate the errors by dragging your mouse over the text, highlighting the error span.
+
+  Once you think you have marked all the errors present in the text, click the **✅ Mark example as complete** button (you can still update the annotation later).
+
+  You will be able to submit the annotations once they are all marked as complete.
+annotator_prompt: "Please annotate all the errors in the text:"
+final_message: |
+  Your annotations have been submitted.
+
+  Your Prolific completion code is **{{ compl_code }}**.
+
+  [Return to Prolific](https://app.prolific.co/submissions/complete?cc={{ compl_code }})
\ No newline at end of file
diff --git a/factgenie/main.py b/factgenie/main.py
index 2f17b2f4..d8acd278 100755
--- a/factgenie/main.py
+++ b/factgenie/main.py
@@ -290,13 +290,8 @@ def crowdsourcing_create():
             indent=4,
         )
 
-    # copy templates/campaigns/annotate_default.html into templates/campaigns/{campaign_id} as "annotate.html"
-    os.makedirs(os.path.join(TEMPLATES_DIR, "campaigns", campaign_id), exist_ok=True)
-
-    shutil.copy(
-        os.path.join(TEMPLATES_DIR, "campaigns", "annotate_default.html"),
-        os.path.join(TEMPLATES_DIR, "campaigns", campaign_id, "annotate.html"),
-    )
+    # prepare the crowdsourcing HTML page
+    utils.create_crowdsourcing_page(campaign_id, config)
 
     # create the campaign object
     campaign = HumanCampaign(campaign_id=campaign_id)
diff --git a/factgenie/static/css/custom.css b/factgenie/static/css/custom.css
index c08072b8..b076b21b 100755
--- a/factgenie/static/css/custom.css
+++ b/factgenie/static/css/custom.css
@@ -828,4 +828,9 @@ a:hover {
     border-style: solid;
     border-radius: 10px;
     padding: 30px;
+}
+
+.CodeMirror,
+.CodeMirror-scroll {
+    min-height: 150px !important;
 }
\ No newline at end of file
diff --git a/factgenie/static/js/factgenie.js b/factgenie/static/js/factgenie.js
index a23909fa..753de88a 100644
--- a/factgenie/static/js/factgenie.js
+++ b/factgenie/static/js/factgenie.js
@@ -595,6 +595,9 @@ function gatherConfig() {
     var config = {};
 
     if (window.mode == "crowdsourcing") {
+        config.annotatorInstructions = annotatorInstructionsMDE.value();
+        config.annotatorPrompt = $("#annotatorPrompt").val();
+        config.finalMessage = finalMessageMDE.value();
         config.examplesPerBatch = $("#examplesPerBatch").val();
         config.idleTime = $("#idleTime").val();
         config.completionCode = $("#completionCode").val();
@@ -973,6 +976,9 @@ function updateCrowdsourcingConfig() {
     const crowdsourcingConfig = $('#crowdsourcingConfig').val();
 
     if (crowdsourcingConfig === "[None]") {
+        annotatorInstructionsMDE.value("");
+        $("#annotatorPrompt").val("");
+        finalMessageMDE.value("");
         $("#examplesPerBatch").val("");
         $("#idleTime").val("");
         $("#completionCode").val("");
@@ -981,12 +987,18 @@ function updateCrowdsourcingConfig() {
 
     const cfg = window.configs[crowdsourcingConfig];
 
+    const annotatorInstructions = cfg.annotator_instructions;
+    const annotatorPrompt = cfg.annotator_prompt;
+    const finalMessage = cfg.final_message;
     const examplesPerBatch = cfg.examples_per_batch;
     const idleTime = cfg.idle_time;
     const completionCode = cfg.completion_code;
     const sortOrder = cfg.sort_order;
     const annotationSpanCategories = cfg.annotation_span_categories;
 
+    annotatorInstructionsMDE.value(annotatorInstructions);
+    $("#annotatorPrompt").val(annotatorPrompt);
+    finalMessageMDE.value(finalMessage);
     $("#examplesPerBatch").val(examplesPerBatch);
     $("#idleTime").val(idleTime);
     $("#completionCode").val(completionCode);
diff --git a/factgenie/templates/campaigns/annotate_default.html b/factgenie/templates/campaigns/annotate_default.html
index 77e70897..f017cfe4 100755
--- a/factgenie/templates/campaigns/annotate_default.html
+++ b/factgenie/templates/campaigns/annotate_default.html
@@ -2,7 +2,7 @@
-    Default Annotation Page
+    Annotations
@@ -80,31 +80,7 @@

-    TODO: Place your instructions here.
-
-    There are {{ metadata.annotation_span_categories | length }} types of errors that you can mark in the generated text:
-
-    {% for annotation_span_category in metadata.annotation_span_categories %}
-    {{ annotation_span_category.name }}
-    {% endfor %}
-
-    You can annotate the errors by dragging your mouse over the text, highlighting the error span.
-    Once you think you have marked all the errors present in the text, click the ✅ Mark example as complete button (you can still update the annotation later).
-    You will be able to submit the annotations once they are all are marked as complete.
+    { FACTGENIE_PLACEHOLDER: instructions }
@@ -112,7 +88,7 @@
-    Please mark all the factually incorrect words in the text:
+    { FACTGENIE_PLACEHOLDER: annotator_prompt }
@@ -138,15 +114,7 @@
     Welcome!
-
-    This is a template which should be customized for your task.
-
-    This overlay will appear when the annotator first opens the page. It can be used to provide instructions or information about the task.
-
-    Opening the preview does not reserve a particular batch, although you can still save the annotations (they will be saved with a Prolific id test and the batch will be marked as finished).
-
-    You can customize this page by editing the template file in factgenie/templates/campaigns/{{ metadata.id }}/annotate.html.
+    { FACTGENIE_PLACEHOLDER: instructions }
@@ -155,10 +123,7 @@
     Welcome!
     Thank you!
-
-    Your annotations have been submitted.
-
-    Your Prolific completion code is {{ compl_code }}.
-
-    Return to Prolific
+    { FACTGENIE_PLACEHOLDER: final_message }
diff --git a/factgenie/templates/crowdsourcing_new.html b/factgenie/templates/crowdsourcing_new.html
index 4b817f0d..0781477b 100755
--- a/factgenie/templates/crowdsourcing_new.html
+++ b/factgenie/templates/crowdsourcing_new.html
@@ -11,6 +11,9 @@
+    [3 added lines, markup omitted: presumably the head includes for the Markdown editor used by the new form fields]
     {% include 'navbar.html' %}
@@ -77,6 +80,28 @@
     New crowdsourcing campaign
+    [22 added lines, markup omitted: form fields for the annotator instructions (Markdown editor), the annotator prompt, and the final message]
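Note: the 27-line addition to factgenie/utils.py and the one-line change to setup.py listed in the diffstat are not part of this excerpt. For orientation only, here is a minimal sketch of what utils.create_crowdsourcing_page() (the new call in main.py above) might look like, assuming it copies annotate_default.html into the campaign folder and substitutes the three FACTGENIE_PLACEHOLDER tokens with the corresponding config values; the real implementation may differ (for example, it may also render the Markdown):

import os

# Illustrative sketch only; the actual utils.py change is not shown in this excerpt.
# TEMPLATES_DIR mirrors the constant already used in factgenie/main.py.
TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), "templates")


def create_crowdsourcing_page(campaign_id, config):
    """Copy the default annotation template and fill in the per-campaign texts."""
    campaign_dir = os.path.join(TEMPLATES_DIR, "campaigns", campaign_id)
    os.makedirs(campaign_dir, exist_ok=True)

    with open(os.path.join(TEMPLATES_DIR, "campaigns", "annotate_default.html")) as f:
        content = f.read()

    # Replace the placeholder tokens with the values configured in the UI / campaign YAML.
    replacements = {
        "{ FACTGENIE_PLACEHOLDER: instructions }": config.get("annotator_instructions", ""),
        "{ FACTGENIE_PLACEHOLDER: annotator_prompt }": config.get("annotator_prompt", ""),
        "{ FACTGENIE_PLACEHOLDER: final_message }": config.get("final_message", ""),
    }
    for placeholder, value in replacements.items():
        content = content.replace(placeholder, value)

    with open(os.path.join(campaign_dir, "annotate.html"), "w") as f:
        f.write(content)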