Skip to content

Commit

Permalink
Publication added, CVs and metrics update
Browse files Browse the repository at this point in the history
  • Loading branch information
Miccighel committed Sep 1, 2024
1 parent 18a8522 commit 6638635
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 15 deletions.
8 changes: 4 additions & 4 deletions content/home/metrics.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@ title: Metrics

| | Google Scholar | Scopus | ACM Digital Library |
|---------------------|----------------|--------|---------------------|
| Articles | 23 | 19 | 13 |
| Citations | 296 | 164 | 87 |
| Citations / Article | 12.86 | 8.69 | 6.69 |
| Articles | 23 | 19 | 14 |
| Citations | 300 | 164 | 87 |
| Citations / Article | 13.04 | 8.63 | 6.21 |
| h-index | 8 | 6 | - |
| i10-index | 7 | - | - |

*Last update: August 19, 2024*
*Last update: September 1, 2024*
14 changes: 8 additions & 6 deletions content/publication/conference-paper-misdoom-2024/cite.bib
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
@comment{MISDOOM 2024 proceedings paper, published by Springer in the LNCS
series. The citation key doubles as the DOI, which is also stored explicitly
in the doi field so styles can render a resolver link.}
@inproceedings{10.1007/978-3-031-71210-4_5,
  title     = {Crowdsourcing Statement Classification to Enhance Information Quality Prediction},
  author    = {Singh, Jaspreet and Soprano, Michael and Roitero, Kevin and Ceolin, Davide},
  year      = 2024,
  month     = aug,
  booktitle = {Disinformation in Open Online Media - 6th Multidisciplinary International Symposium ({MISDOOM} 2024)},
  location  = {M{\"u}nster, Germany},
  publisher = {Springer Nature Switzerland},
  address   = {Cham},
  series    = {Lecture Notes in Computer Science},
  pages     = {70--85},
  doi       = {10.1007/978-3-031-71210-4_5},
  isbn      = {978-3-031-71210-4},
  numpages  = 15,
  note      = {Accepted for publication on June 21, 2024},
  keywords  = {Crowdsourcing Annotation, Information Quality Assessment, Argument Type Identification},
  editor    = {Preuss, Mike and Leszkiewicz, Agata and Boucher, Jean-Christopher and Fridman, Ofer and Stampe, Lucas},
  abstract  = {This paper explores the use of crowdsourcing to classify statement types in film reviews to assess their information quality. Employing the Argument Type Identification Procedure which uses the Periodic Table of Arguments to categorize arguments, the study aims to connect statement types to the overall argument strength and information reliability. Focusing on non-expert annotators in a crowdsourcing environment, the research assesses their reliability based on various factors including language proficiency and annotation experience. Results indicate the importance of careful annotator selection and training to achieve high inter-annotator agreement and highlight challenges in crowdsourcing statement classification for information quality assessment.}
}
10 changes: 5 additions & 5 deletions content/publication/conference-paper-misdoom-2024/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ tags:
- information quality assessment
- argument type identification
categories: []
date: '2024-06-22'
lastmod: 2024-06-22T17:46:00+01:00
date: '2024-09-01'
lastmod: 2024-09-01T10:00:00+01:00
featured: false
draft: false

Expand All @@ -33,9 +33,9 @@ image:
# E.g. `projects = ["internal-project"]` references `content/project/deep-learning/index.md`.
# Otherwise, set `projects = []`.
projects: []
publishDate: '2024-06-22T14:36:35.998652Z'
publishDate: '2024-09-01T10:00:00.998652Z'
publication_types:
- '1'
abstract: 'Due to their relatively low cost and ability to scale, crowdsourcing based approaches are widely used to collect a large amount of human annotated data. To this aim, multiple crowdsourcing platforms exist, where requesters can upload tasks and workers can carry them out and obtain payment in return. Such platforms share a task design and deploy workflow that is often counter-intuitive and cumbersome. To address this issue, we propose Crowd_Frame, a simple and complete framework which allows to develop and deploy diverse types of complex crowdsourcing tasks in an easy and customizable way. We show the abilities of the proposed framework and we make it available to researchers and practitioners.'
publication: '*Proceedings of the 6th Multidisciplinary International Symposium on Disinformation in Online Open Media (MISDOOM 2024). Münster, Germany. Accepted for publication on June 21, 2024.*'
abstract: 'This paper explores the use of crowdsourcing to classify statement types in film reviews to assess their information quality. Employing the Argument Type Identification Procedure which uses the Periodic Table of Arguments to categorize arguments, the study aims to connect statement types to the overall argument strength and information reliability. Focusing on non-expert annotators in a crowdsourcing environment, the research assesses their reliability based on various factors including language proficiency and annotation experience. Results indicate the importance of careful annotator selection and training to achieve high inter-annotator agreement and highlight challenges in crowdsourcing statement classification for information quality assessment.'
publication: '*Disinformation in Open Online Media - 6th Multidisciplinary International Symposium (MISDOOM 2024). Münster, Germany.*'
---
Binary file modified static/media/Curriculum_EN.pdf
Binary file not shown.
Binary file modified static/media/Curriculum_IT.pdf
Binary file not shown.

0 comments on commit 6638635

Please sign in to comment.