-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Publication added, CVs and metrics update
- Loading branch information
Showing
5 changed files
with
17 additions
and
15 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,14 +1,16 @@ | ||
@inproceedings{10.1007/978-3-031-71210-4_5,
  title     = {Crowdsourcing Statement Classification to Enhance Information Quality Prediction},
  author    = {Singh, Jaspreet and Soprano, Michael and Roitero, Kevin and Ceolin, Davide},
  year      = {2024},
  month     = aug,
  booktitle = {Disinformation in Open Online Media - 6th Multidisciplinary International Symposium ({MISDOOM} 2024)},
  location  = {M{\"u}nster, Germany},
  publisher = {Springer Nature Switzerland},
  address   = {Cham},
  series    = {Lecture Notes in Computer Science},
  pages     = {70--85},
  isbn      = {978-3-031-71210-4},
  editor    = {Preuss, Mike and Leszkiewicz, Agata and Boucher, Jean-Christopher and Fridman, Ofer and Stampe, Lucas},
  keywords  = {Crowdsourcing Annotation, Information Quality Assessment, Argument Type Identification},
  abstract  = {This paper explores the use of crowdsourcing to classify statement types in film reviews to assess their information quality. Employing the Argument Type Identification Procedure which uses the Periodic Table of Arguments to categorize arguments, the study aims to connect statement types to the overall argument strength and information reliability. Focusing on non-expert annotators in a crowdsourcing environment, the research assesses their reliability based on various factors including language proficiency and annotation experience. Results indicate the importance of careful annotator selection and training to achieve high inter-annotator agreement and highlight challenges in crowdsourcing statement classification for information quality assessment.},
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Binary file not shown.
Binary file not shown.