@inproceedings{devianti-miyao-2024-transferability,
    title = "Transferability of Syntax-Aware Graph Neural Networks in Zero-Shot Cross-Lingual Semantic Role Labeling",
    author = "Devianti, Rachel Sidney  and
      Miyao, Yusuke",
    editor = "Al-Onaizan, Yaser  and
      Bansal, Mohit  and
      Chen, Yun-Nung",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2024",
    month = nov,
    year = "2024",
    address = "Miami, Florida, USA",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.findings-emnlp.2/",
    doi = "10.18653/v1/2024.findings-emnlp.2",
    pages = "20--42",
    abstract = "Recent models in cross-lingual semantic role labeling (SRL) barely analyze the applicability of their network selection. We believe that network selection is important since it affects the transferability of cross-lingual models, i.e., how the model can extract universal features from source languages to label target languages. Therefore, we comprehensively compare the transferability of different graph neural network (GNN)-based models enriched with universal dependency trees. GNN-based models include transformer-based, graph convolutional network-based, and graph attention network (GAT)-based models. We focus our study on a zero-shot setting by training the models in English and evaluating the models in 23 target languages provided by the Universal Proposition Bank. Based on our experiments, we consistently show that syntax from universal dependency trees is essential for cross-lingual SRL models to achieve better transferability. Dependency-aware self-attention with relative position representations (SAN-RPRs) transfer best across languages, especially in the long-range dependency distance. We also show that dependency-aware two-attention relational GATs transfer better than SAN-RPRs in languages where most arguments lie in a 1-2 dependency distance."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="devianti-miyao-2024-transferability">
  <titleInfo>
    <title>Transferability of Syntax-Aware Graph Neural Networks in Zero-Shot Cross-Lingual Semantic Role Labeling</title>
  </titleInfo>
  <name type="personal">
    <namePart type="given">Rachel</namePart>
    <namePart type="given">Sidney</namePart>
    <namePart type="family">Devianti</namePart>
    <role>
      <roleTerm authority="marcrelator" type="text">author</roleTerm>
    </role>
  </name>
  <name type="personal">
    <namePart type="given">Yusuke</namePart>
    <namePart type="family">Miyao</namePart>
    <role>
      <roleTerm authority="marcrelator" type="text">author</roleTerm>
    </role>
  </name>
  <originInfo>
    <dateIssued>2024-11</dateIssued>
  </originInfo>
  <typeOfResource>text</typeOfResource>
  <relatedItem type="host">
    <titleInfo>
      <title>Findings of the Association for Computational Linguistics: EMNLP 2024</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Yaser</namePart>
      <namePart type="family">Al-Onaizan</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">editor</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Mohit</namePart>
      <namePart type="family">Bansal</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">editor</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yun-Nung</namePart>
      <namePart type="family">Chen</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">editor</roleTerm>
      </role>
    </name>
    <originInfo>
      <publisher>Association for Computational Linguistics</publisher>
      <place>
        <placeTerm type="text">Miami, Florida, USA</placeTerm>
      </place>
    </originInfo>
    <genre authority="marcgt">conference publication</genre>
  </relatedItem>
  <abstract>Recent models in cross-lingual semantic role labeling (SRL) barely analyze the applicability of their network selection. We believe that network selection is important since it affects the transferability of cross-lingual models, i.e., how the model can extract universal features from source languages to label target languages. Therefore, we comprehensively compare the transferability of different graph neural network (GNN)-based models enriched with universal dependency trees. GNN-based models include transformer-based, graph convolutional network-based, and graph attention network (GAT)-based models. We focus our study on a zero-shot setting by training the models in English and evaluating the models in 23 target languages provided by the Universal Proposition Bank. Based on our experiments, we consistently show that syntax from universal dependency trees is essential for cross-lingual SRL models to achieve better transferability. Dependency-aware self-attention with relative position representations (SAN-RPRs) transfer best across languages, especially in the long-range dependency distance. We also show that dependency-aware two-attention relational GATs transfer better than SAN-RPRs in languages where most arguments lie in a 1-2 dependency distance.</abstract>
  <identifier type="citekey">devianti-miyao-2024-transferability</identifier>
  <identifier type="doi">10.18653/v1/2024.findings-emnlp.2</identifier>
  <location>
    <url>https://aclanthology.org/2024.findings-emnlp.2/</url>
  </location>
  <part>
    <date>2024-11</date>
    <extent unit="page">
      <start>20</start>
      <end>42</end>
    </extent>
  </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Transferability of Syntax-Aware Graph Neural Networks in Zero-Shot Cross-Lingual Semantic Role Labeling
%A Devianti, Rachel Sidney
%A Miyao, Yusuke
%Y Al-Onaizan, Yaser
%Y Bansal, Mohit
%Y Chen, Yun-Nung
%S Findings of the Association for Computational Linguistics: EMNLP 2024
%D 2024
%8 November
%I Association for Computational Linguistics
%C Miami, Florida, USA
%F devianti-miyao-2024-transferability
%X Recent models in cross-lingual semantic role labeling (SRL) barely analyze the applicability of their network selection. We believe that network selection is important since it affects the transferability of cross-lingual models, i.e., how the model can extract universal features from source languages to label target languages. Therefore, we comprehensively compare the transferability of different graph neural network (GNN)-based models enriched with universal dependency trees. GNN-based models include transformer-based, graph convolutional network-based, and graph attention network (GAT)-based models. We focus our study on a zero-shot setting by training the models in English and evaluating the models in 23 target languages provided by the Universal Proposition Bank. Based on our experiments, we consistently show that syntax from universal dependency trees is essential for cross-lingual SRL models to achieve better transferability. Dependency-aware self-attention with relative position representations (SAN-RPRs) transfer best across languages, especially in the long-range dependency distance. We also show that dependency-aware two-attention relational GATs transfer better than SAN-RPRs in languages where most arguments lie in a 1-2 dependency distance.
%R 10.18653/v1/2024.findings-emnlp.2
%U https://aclanthology.org/2024.findings-emnlp.2/
%U https://doi.org/10.18653/v1/2024.findings-emnlp.2
%P 20-42
Markdown (Informal)
[Transferability of Syntax-Aware Graph Neural Networks in Zero-Shot Cross-Lingual Semantic Role Labeling](https://aclanthology.org/2024.findings-emnlp.2/) (Devianti & Miyao, Findings 2024)
ACL
Rachel Sidney Devianti and Yusuke Miyao. 2024. Transferability of Syntax-Aware Graph Neural Networks in Zero-Shot Cross-Lingual Semantic Role Labeling. In Findings of the Association for Computational Linguistics: EMNLP 2024, pages 20–42, Miami, Florida, USA. Association for Computational Linguistics.