BibTeX
@article{choenni-etal-2023-cross,
title = "Cross-Lingual Transfer with Language-Specific Subnetworks for Low-Resource Dependency Parsing",
author = "Choenni, Rochelle and
Garrette, Dan and
Shutova, Ekaterina",
journal = "Computational Linguistics",
volume = "49",
number = "3",
month = sep,
year = "2023",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/2023.cl-3.3",
doi = "10.1162/coli_a_00482",
pages = "613--641",
abstract = "Large multilingual language models typically share their parameters across all languages, which enables cross-lingual task transfer, but learning can also be hindered when training updates from different languages are in conflict. In this article, we propose novel methods for using language-specific subnetworks, which control cross-lingual parameter sharing, to reduce conflicts and increase positive transfer during fine-tuning. We introduce dynamic subnetworks, which are jointly updated with the model, and we combine our methods with meta-learning, an established, but complementary, technique for improving cross-lingual transfer. Finally, we provide extensive analyses of how each of our methods affects the models.",
}
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="choenni-etal-2023-cross">
<titleInfo>
<title>Cross-Lingual Transfer with Language-Specific Subnetworks for Low-Resource Dependency Parsing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Rochelle</namePart>
<namePart type="family">Choenni</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dan</namePart>
<namePart type="family">Garrette</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ekaterina</namePart>
<namePart type="family">Shutova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<genre authority="bibutilsgt">journal article</genre>
<relatedItem type="host">
<titleInfo>
<title>Computational Linguistics</title>
</titleInfo>
<originInfo>
<issuance>continuing</issuance>
<publisher>MIT Press</publisher>
<place>
<placeTerm type="text">Cambridge, MA</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">periodical</genre>
<genre authority="bibutilsgt">academic journal</genre>
</relatedItem>
<abstract>Large multilingual language models typically share their parameters across all languages, which enables cross-lingual task transfer, but learning can also be hindered when training updates from different languages are in conflict. In this article, we propose novel methods for using language-specific subnetworks, which control cross-lingual parameter sharing, to reduce conflicts and increase positive transfer during fine-tuning. We introduce dynamic subnetworks, which are jointly updated with the model, and we combine our methods with meta-learning, an established, but complementary, technique for improving cross-lingual transfer. Finally, we provide extensive analyses of how each of our methods affects the models.</abstract>
<identifier type="citekey">choenni-etal-2023-cross</identifier>
<identifier type="doi">10.1162/coli_a_00482</identifier>
<location>
<url>https://aclanthology.org/2023.cl-3.3</url>
</location>
<part>
<date>2023-09</date>
<detail type="volume"><number>49</number></detail>
<detail type="issue"><number>3</number></detail>
<extent unit="page">
<start>613</start>
<end>641</end>
</extent>
</part>
</mods>
</modsCollection>
Endnote
%0 Journal Article
%T Cross-Lingual Transfer with Language-Specific Subnetworks for Low-Resource Dependency Parsing
%A Choenni, Rochelle
%A Garrette, Dan
%A Shutova, Ekaterina
%J Computational Linguistics
%V 49
%N 3
%D 2023
%8 September
%I MIT Press
%C Cambridge, MA
%F choenni-etal-2023-cross
%X Large multilingual language models typically share their parameters across all languages, which enables cross-lingual task transfer, but learning can also be hindered when training updates from different languages are in conflict. In this article, we propose novel methods for using language-specific subnetworks, which control cross-lingual parameter sharing, to reduce conflicts and increase positive transfer during fine-tuning. We introduce dynamic subnetworks, which are jointly updated with the model, and we combine our methods with meta-learning, an established, but complementary, technique for improving cross-lingual transfer. Finally, we provide extensive analyses of how each of our methods affects the models.
%R 10.1162/coli_a_00482
%U https://aclanthology.org/2023.cl-3.3
%U https://doi.org/10.1162/coli_a_00482
%P 613-641
Markdown (Informal)
[Cross-Lingual Transfer with Language-Specific Subnetworks for Low-Resource Dependency Parsing](https://aclanthology.org/2023.cl-3.3) (Choenni et al., CL 2023)
ACL
Rochelle Choenni, Dan Garrette, and Ekaterina Shutova. 2023. Cross-Lingual Transfer with Language-Specific Subnetworks for Low-Resource Dependency Parsing. Computational Linguistics, 49(3):613–641.
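For orientation, here is a minimal, hypothetical sketch of the fine-tuning idea the abstract describes: a binary mask per language gates which shared parameters a language's gradient updates may touch. The function name, mask layout, and plain SGD update below are illustrative assumptions, not the authors' implementation.

```python
# Hypothetical sketch of language-specific subnetworks (not the paper's code):
# each language owns a binary mask over shared parameters, and a fine-tuning
# step for that language only updates the parameters its mask selects.
import torch
from torch import nn

def masked_sgd_step(model: nn.Module, loss: torch.Tensor,
                    lang_masks: dict, lang: str, lr: float = 1e-4) -> None:
    """One gated fine-tuning step; lang_masks[lang] maps parameter
    names to 0/1 tensors defining that language's subnetwork."""
    model.zero_grad()
    loss.backward()
    with torch.no_grad():
        for name, param in model.named_parameters():
            if param.grad is None:
                continue
            mask = lang_masks[lang].get(name)
            if mask is not None:
                param.grad.mul_(mask)  # zero out updates outside the subnetwork
            param.add_(param.grad, alpha=-lr)  # SGD step on the masked gradient
```

A dynamic variant, as the abstract notes, would update the masks jointly with the model rather than fixing them in advance.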