@inproceedings{barikbin-2022-slpl,
    title = "{SLPL}-Sentiment at {S}em{E}val-2022 Task 10: Making Use of Pre-Trained Model's Attention Values in Structured Sentiment Analysis",
author = "Barikbin, Sadrodin",
editor = "Emerson, Guy and
Schluter, Natalie and
Stanovsky, Gabriel and
Kumar, Ritesh and
Palmer, Alexis and
Schneider, Nathan and
Singh, Siddharth and
Ratan, Shyam",
booktitle = "Proceedings of the 16th International Workshop on Semantic Evaluation (SemEval-2022)",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.semeval-1.192/",
doi = "10.18653/v1/2022.semeval-1.192",
pages = "1382--1388",
    abstract = "Sentiment analysis is a useful problem that can serve a variety of fields, from business intelligence to social and even health studies. Using the SemEval 2022 Task 10 formulation of this problem and taking sequence labeling as our approach, we propose a model that learns the task by finetuning a pretrained transformer, introducing as few new parameters ({\textasciitilde}150k) as possible and making use of the transformer's precomputed attention values. Our model improves on the shared task baselines on all task datasets."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="barikbin-2022-slpl">
<titleInfo>
<title>SLPL-Sentiment at SemEval-2022 Task 10: Making Use of Pre-Trained Model’s Attention Values in Structured Sentiment Analysis</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sadrodin</namePart>
<namePart type="family">Barikbin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 16th International Workshop on Semantic Evaluation (SemEval-2022)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Guy</namePart>
<namePart type="family">Emerson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Natalie</namePart>
<namePart type="family">Schluter</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Gabriel</namePart>
<namePart type="family">Stanovsky</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ritesh</namePart>
<namePart type="family">Kumar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexis</namePart>
<namePart type="family">Palmer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nathan</namePart>
<namePart type="family">Schneider</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Siddharth</namePart>
<namePart type="family">Singh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shyam</namePart>
<namePart type="family">Ratan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Seattle, United States</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Sentiment analysis is a useful problem that can serve a variety of fields, from business intelligence to social and even health studies. Using the SemEval 2022 Task 10 formulation of this problem and taking sequence labeling as our approach, we propose a model that learns the task by finetuning a pretrained transformer, introducing as few new parameters (~150k) as possible and making use of the transformer’s precomputed attention values. Our model improves on the shared task baselines on all task datasets.</abstract>
<identifier type="citekey">barikbin-2022-slpl</identifier>
<identifier type="doi">10.18653/v1/2022.semeval-1.192</identifier>
<location>
<url>https://aclanthology.org/2022.semeval-1.192/</url>
</location>
<part>
<date>2022-07</date>
<extent unit="page">
<start>1382</start>
<end>1388</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T SLPL-Sentiment at SemEval-2022 Task 10: Making Use of Pre-Trained Model’s Attention Values in Structured Sentiment Analysis
%A Barikbin, Sadrodin
%Y Emerson, Guy
%Y Schluter, Natalie
%Y Stanovsky, Gabriel
%Y Kumar, Ritesh
%Y Palmer, Alexis
%Y Schneider, Nathan
%Y Singh, Siddharth
%Y Ratan, Shyam
%S Proceedings of the 16th International Workshop on Semantic Evaluation (SemEval-2022)
%D 2022
%8 July
%I Association for Computational Linguistics
%C Seattle, United States
%F barikbin-2022-slpl
%X Sentiment analysis is a useful problem that can serve a variety of fields, from business intelligence to social and even health studies. Using the SemEval 2022 Task 10 formulation of this problem and taking sequence labeling as our approach, we propose a model that learns the task by finetuning a pretrained transformer, introducing as few new parameters (~150k) as possible and making use of the transformer’s precomputed attention values. Our model improves on the shared task baselines on all task datasets.
%R 10.18653/v1/2022.semeval-1.192
%U https://aclanthology.org/2022.semeval-1.192/
%U https://doi.org/10.18653/v1/2022.semeval-1.192
%P 1382-1388
Markdown (Informal)
[SLPL-Sentiment at SemEval-2022 Task 10: Making Use of Pre-Trained Model’s Attention Values in Structured Sentiment Analysis](https://aclanthology.org/2022.semeval-1.192/) (Barikbin, SemEval 2022)
ACL
Sadrodin Barikbin. 2022. SLPL-Sentiment at SemEval-2022 Task 10: Making Use of Pre-Trained Model’s Attention Values in Structured Sentiment Analysis. In Proceedings of the 16th International Workshop on Semantic Evaluation (SemEval-2022), pages 1382–1388, Seattle, United States. Association for Computational Linguistics.