@inproceedings{wang-zhao-2021-advances,
title = "Advances and Challenges in Unsupervised Neural Machine Translation",
author = "Wang, Rui and
Zhao, Hai",
editor = "Augenstein, Isabelle and
Habernal, Ivan",
booktitle = "Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts",
month = apr,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.eacl-tutorials.5",
doi = "10.18653/v1/2021.eacl-tutorials.5",
pages = "17--21",
abstract = "Unsupervised cross-lingual language representation initialization methods, together with mechanisms such as denoising and back-translation, have advanced unsupervised neural machine translation (UNMT), which has achieved impressive results. Meanwhile, there are still several challenges for UNMT. This tutorial first introduces the background and the latest progress of UNMT. We then examine a number of challenges to UNMT and give empirical results on how well the technology currently holds up.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="wang-zhao-2021-advances">
<titleInfo>
<title>Advances and Challenges in Unsupervised Neural Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Rui</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hai</namePart>
<namePart type="family">Zhao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-04</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts</title>
</titleInfo>
<name type="personal">
<namePart type="given">Isabelle</namePart>
<namePart type="family">Augenstein</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ivan</namePart>
<namePart type="family">Habernal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Unsupervised cross-lingual language representation initialization methods, together with mechanisms such as denoising and back-translation, have advanced unsupervised neural machine translation (UNMT), which has achieved impressive results. Meanwhile, there are still several challenges for UNMT. This tutorial first introduces the background and the latest progress of UNMT. We then examine a number of challenges to UNMT and give empirical results on how well the technology currently holds up.</abstract>
<identifier type="citekey">wang-zhao-2021-advances</identifier>
<identifier type="doi">10.18653/v1/2021.eacl-tutorials.5</identifier>
<location>
<url>https://aclanthology.org/2021.eacl-tutorials.5</url>
</location>
<part>
<date>2021-04</date>
<extent unit="page">
<start>17</start>
<end>21</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Advances and Challenges in Unsupervised Neural Machine Translation
%A Wang, Rui
%A Zhao, Hai
%Y Augenstein, Isabelle
%Y Habernal, Ivan
%S Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts
%D 2021
%8 April
%I Association for Computational Linguistics
%C Online
%F wang-zhao-2021-advances
%X Unsupervised cross-lingual language representation initialization methods, together with mechanisms such as denoising and back-translation, have advanced unsupervised neural machine translation (UNMT), which has achieved impressive results. Meanwhile, there are still several challenges for UNMT. This tutorial first introduces the background and the latest progress of UNMT. We then examine a number of challenges to UNMT and give empirical results on how well the technology currently holds up.
%R 10.18653/v1/2021.eacl-tutorials.5
%U https://aclanthology.org/2021.eacl-tutorials.5
%U https://doi.org/10.18653/v1/2021.eacl-tutorials.5
%P 17-21
Markdown (Informal)
[Advances and Challenges in Unsupervised Neural Machine Translation](https://aclanthology.org/2021.eacl-tutorials.5) (Wang & Zhao, EACL 2021)
ACL