@inproceedings{caselli-etal-2021-protest,
title = "{PROTEST}-{ER}: Retraining {BERT} for Protest Event Extraction",
author = {Caselli, Tommaso and
Mutlu, Osman and
Basile, Angelo and
H{\"u}rriyeto{\u{g}}lu, Ali},
editor = {H{\"u}rriyeto{\u{g}}lu, Ali},
booktitle = "Proceedings of the 4th Workshop on Challenges and Applications of Automated Extraction of Socio-political Events from Text (CASE 2021)",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.case-1.4/",
doi = "10.18653/v1/2021.case-1.4",
pages = "12--19",
abstract = "We analyze the effect of further retraining BERT with different domain specific data as an unsupervised domain adaptation strategy for event extraction. Portability of event extraction models is particularly challenging, with large performance drops affecting data on the same text genres (e.g., news). We present PROTEST-ER, a retrained BERT model for protest event extraction. PROTEST-ER outperforms a corresponding generic BERT on out-of-domain data of 8.1 points. Our best performing models reach 51.91-46.39 F1 across both domains."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="caselli-etal-2021-protest">
<titleInfo>
<title>PROTEST-ER: Retraining BERT for Protest Event Extraction</title>
</titleInfo>
<name type="personal">
<namePart type="given">Tommaso</namePart>
<namePart type="family">Caselli</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Osman</namePart>
<namePart type="family">Mutlu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Angelo</namePart>
<namePart type="family">Basile</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ali</namePart>
<namePart type="family">Hürriyetoğlu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 4th Workshop on Challenges and Applications of Automated Extraction of Socio-political Events from Text (CASE 2021)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ali</namePart>
<namePart type="family">Hürriyetoğlu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We analyze the effect of further retraining BERT with different domain-specific data as an unsupervised domain adaptation strategy for event extraction. Portability of event extraction models is particularly challenging, with large performance drops even on data of the same text genre (e.g., news). We present PROTEST-ER, a retrained BERT model for protest event extraction. PROTEST-ER outperforms a corresponding generic BERT on out-of-domain data by 8.1 points. Our best-performing models reach 51.91–46.39 F1 across both domains.</abstract>
<identifier type="citekey">caselli-etal-2021-protest</identifier>
<identifier type="doi">10.18653/v1/2021.case-1.4</identifier>
<location>
<url>https://aclanthology.org/2021.case-1.4/</url>
</location>
<part>
<date>2021-08</date>
<extent unit="page">
<start>12</start>
<end>19</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T PROTEST-ER: Retraining BERT for Protest Event Extraction
%A Caselli, Tommaso
%A Mutlu, Osman
%A Basile, Angelo
%A Hürriyetoğlu, Ali
%Y Hürriyetoğlu, Ali
%S Proceedings of the 4th Workshop on Challenges and Applications of Automated Extraction of Socio-political Events from Text (CASE 2021)
%D 2021
%8 August
%I Association for Computational Linguistics
%C Online
%F caselli-etal-2021-protest
%X We analyze the effect of further retraining BERT with different domain-specific data as an unsupervised domain adaptation strategy for event extraction. Portability of event extraction models is particularly challenging, with large performance drops even on data of the same text genre (e.g., news). We present PROTEST-ER, a retrained BERT model for protest event extraction. PROTEST-ER outperforms a corresponding generic BERT on out-of-domain data by 8.1 points. Our best-performing models reach 51.91–46.39 F1 across both domains.
%R 10.18653/v1/2021.case-1.4
%U https://aclanthology.org/2021.case-1.4/
%U https://doi.org/10.18653/v1/2021.case-1.4
%P 12-19
Markdown (Informal)

[PROTEST-ER: Retraining BERT for Protest Event Extraction](https://aclanthology.org/2021.case-1.4/) (Caselli et al., CASE 2021)

ACL

Tommaso Caselli, Osman Mutlu, Angelo Basile, and Ali Hürriyetoğlu. 2021. PROTEST-ER: Retraining BERT for Protest Event Extraction. In *Proceedings of the 4th Workshop on Challenges and Applications of Automated Extraction of Socio-political Events from Text (CASE 2021)*, pages 12–19, Online. Association for Computational Linguistics.
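
The "further retraining" the abstract describes is continued masked-language-model pretraining of BERT on in-domain (protest) text before fine-tuning for event extraction. Below is a minimal sketch of that step, assuming the Hugging Face `transformers` and `datasets` libraries; the corpus file name (`protest_corpus.txt`), base checkpoint, and hyperparameters are illustrative assumptions, not the authors' actual configuration.

```python
# Hedged sketch: continued MLM pretraining of BERT on in-domain text,
# the unsupervised domain adaptation step described in the abstract.
# Corpus path, checkpoint, and hyperparameters are placeholders.
from datasets import load_dataset
from transformers import (
    AutoModelForMaskedLM,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")

# protest_corpus.txt stands in for the protest-domain raw text, one
# passage per line.
dataset = load_dataset("text", data_files={"train": "protest_corpus.txt"})

def tokenize(batch):
    return tokenizer(batch["text"], truncation=True, max_length=128)

tokenized = dataset["train"].map(tokenize, batched=True, remove_columns=["text"])

# The collator applies BERT-style random masking; padding is dynamic.
collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm_probability=0.15)

args = TrainingArguments(
    output_dir="protest-er",
    num_train_epochs=3,              # illustrative value
    per_device_train_batch_size=16,  # illustrative value
)

Trainer(
    model=model,
    args=args,
    train_dataset=tokenized,
    data_collator=collator,
).train()
```

The resulting checkpoint would then be fine-tuned on labeled event extraction data in the usual way; this sketch covers only the retraining step.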