@inproceedings{kulkarni-bhattacharyya-2021-retrofitting,
title = "Retrofitting of Pre-trained Emotion Words with {VAD}-dimensions and the {P}lutchik Emotions",
author = "Kulkarni, Manasi and
Bhattacharyya, Pushpak",
editor = "Bandyopadhyay, Sivaji and
Devi, Sobha Lalitha and
Bhattacharyya, Pushpak",
booktitle = "Proceedings of the 18th International Conference on Natural Language Processing (ICON)",
month = dec,
year = "2021",
address = "National Institute of Technology Silchar, Silchar, India",
publisher = "NLP Association of India (NLPAI)",
url = "https://aclanthology.org/2021.icon-main.64/",
pages = "529--536",
abstract = "The word representations are based on distributional hypothesis according to which words that occur in the similar contexts, tend to have a similar meaning and appear closer in vector space. For example, the emotionally dissimilar words {\textquotedblright}joy{\textquotedblright} and {\textquotedblright}sadness{\textquotedblright} have higher cosine similarity. The existing pre-trained embedding models lack in emotional words interpretations. For creating our VAD-Emotion embeddings, we modify the pre-trained word embeddings with emotion information. This is a lexicons based approach that uses the Valence, Arousal and Dominance (VAD) values, and the Plutchik`s emotions to incorporate the emotion information in pre-trained word embeddings using post-training processing. This brings emotionally similar words nearer and emotionally dissimilar words away from each other in the proposed vector space. We demonstrate the performance of proposed embedding through NLP downstream task - Emotion Recognition."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kulkarni-bhattacharyya-2021-retrofitting">
<titleInfo>
<title>Retrofitting of Pre-trained Emotion Words with VAD-dimensions and the Plutchik Emotions</title>
</titleInfo>
<name type="personal">
<namePart type="given">Manasi</namePart>
<namePart type="family">Kulkarni</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pushpak</namePart>
<namePart type="family">Bhattacharyya</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 18th International Conference on Natural Language Processing (ICON)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sivaji</namePart>
<namePart type="family">Bandyopadhyay</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sobha</namePart>
<namePart type="given">Lalitha</namePart>
<namePart type="family">Devi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pushpak</namePart>
<namePart type="family">Bhattacharyya</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>NLP Association of India (NLPAI)</publisher>
<place>
<placeTerm type="text">National Institute of Technology Silchar, Silchar, India</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Word representations are based on the distributional hypothesis, according to which words that occur in similar contexts tend to have similar meanings and appear closer in vector space. For example, the emotionally dissimilar words “joy” and “sadness” have a high cosine similarity. Existing pre-trained embedding models thus lack an emotional interpretation of words. To create our VAD-Emotion embeddings, we modify pre-trained word embeddings with emotion information. This is a lexicon-based approach that uses Valence, Arousal and Dominance (VAD) values and Plutchik’s emotions to incorporate emotion information into pre-trained word embeddings through post-training processing. This brings emotionally similar words closer together and moves emotionally dissimilar words away from each other in the proposed vector space. We demonstrate the performance of the proposed embeddings on a downstream NLP task, Emotion Recognition.</abstract>
<identifier type="citekey">kulkarni-bhattacharyya-2021-retrofitting</identifier>
<location>
<url>https://aclanthology.org/2021.icon-main.64/</url>
</location>
<part>
<date>2021-12</date>
<extent unit="page">
<start>529</start>
<end>536</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Retrofitting of Pre-trained Emotion Words with VAD-dimensions and the Plutchik Emotions
%A Kulkarni, Manasi
%A Bhattacharyya, Pushpak
%Y Bandyopadhyay, Sivaji
%Y Devi, Sobha Lalitha
%Y Bhattacharyya, Pushpak
%S Proceedings of the 18th International Conference on Natural Language Processing (ICON)
%D 2021
%8 December
%I NLP Association of India (NLPAI)
%C National Institute of Technology Silchar, Silchar, India
%F kulkarni-bhattacharyya-2021-retrofitting
%X Word representations are based on the distributional hypothesis, according to which words that occur in similar contexts tend to have similar meanings and appear closer in vector space. For example, the emotionally dissimilar words “joy” and “sadness” have a high cosine similarity. Existing pre-trained embedding models thus lack an emotional interpretation of words. To create our VAD-Emotion embeddings, we modify pre-trained word embeddings with emotion information. This is a lexicon-based approach that uses Valence, Arousal and Dominance (VAD) values and Plutchik’s emotions to incorporate emotion information into pre-trained word embeddings through post-training processing. This brings emotionally similar words closer together and moves emotionally dissimilar words away from each other in the proposed vector space. We demonstrate the performance of the proposed embeddings on a downstream NLP task, Emotion Recognition.
%U https://aclanthology.org/2021.icon-main.64/
%P 529-536
Markdown (Informal)
[Retrofitting of Pre-trained Emotion Words with VAD-dimensions and the Plutchik Emotions](https://aclanthology.org/2021.icon-main.64/) (Kulkarni & Bhattacharyya, ICON 2021)
ACL
Manasi Kulkarni and Pushpak Bhattacharyya. 2021. Retrofitting of Pre-trained Emotion Words with VAD-dimensions and the Plutchik Emotions. In Proceedings of the 18th International Conference on Natural Language Processing (ICON), pages 529–536, National Institute of Technology Silchar, Silchar, India. NLP Association of India (NLPAI).
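
The abstract describes a lexicon-based post-processing ("retrofitting") step that uses VAD values to pull emotionally similar words together in the embedding space. Below is a minimal sketch of such an update, assuming a Faruqui-style iterative averaging scheme and toy data; the function names, distance threshold, embeddings, and VAD entries are hypothetical illustrations, not the authors' implementation or lexicons.

```python
import numpy as np

# Toy pre-trained vectors: "joy" and "sadness" start out distributionally close.
embeddings = {
    "joy":     np.array([0.9, 0.1, 0.3]),
    "sadness": np.array([0.8, 0.2, 0.3]),
    "grief":   np.array([0.7, 0.3, 0.2]),
}

# Toy VAD lexicon entries (valence, arousal, dominance), scaled to [0, 1].
vad = {
    "joy":     np.array([0.95, 0.60, 0.70]),
    "sadness": np.array([0.10, 0.30, 0.25]),
    "grief":   np.array([0.05, 0.35, 0.20]),
}

def emotional_neighbours(word, threshold=0.25):
    """Words whose VAD values lie within `threshold` Euclidean distance."""
    return [w for w in vad
            if w != word and np.linalg.norm(vad[w] - vad[word]) < threshold]

def retrofit(embeddings, iterations=10, alpha=1.0, beta=1.0):
    """Iteratively pull each word toward its emotional neighbours while
    keeping it close to its original pre-trained vector."""
    new = {w: v.copy() for w, v in embeddings.items()}
    for _ in range(iterations):
        for word in embeddings:
            nbrs = emotional_neighbours(word)
            if not nbrs:
                continue
            neighbour_sum = sum(new[n] for n in nbrs)
            new[word] = (alpha * embeddings[word] + beta * neighbour_sum) \
                        / (alpha + beta * len(nbrs))
    return new

def cosine(a, b):
    return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))

# Compare cosine similarities before and after the emotion-aware update.
retrofitted = retrofit(embeddings)
print("cos(joy, sadness) before:", round(cosine(embeddings["joy"], embeddings["sadness"]), 3))
print("cos(joy, sadness) after: ", round(cosine(retrofitted["joy"], retrofitted["sadness"]), 3))
print("cos(sadness, grief) after:", round(cosine(retrofitted["sadness"], retrofitted["grief"]), 3))
```

In this sketch the attraction term only draws emotionally similar words together; emotionally dissimilar pairs drift apart indirectly. The paper additionally uses Plutchik's emotion categories alongside VAD, which is not modelled here.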