@inproceedings{tu-etal-2021-unsupervised,
title = "Unsupervised Natural Language Parsing (Introductory Tutorial)",
author = "Tu, Kewei and
Jiang, Yong and
Han, Wenjuan and
Zhao, Yanpeng",
editor = "Augenstein, Isabelle and
Habernal, Ivan",
booktitle = "Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts",
month = apr,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.eacl-tutorials.1",
doi = "10.18653/v1/2021.eacl-tutorials.1",
pages = "1--5",
abstract = "Unsupervised parsing learns a syntactic parser from training sentences without parse tree annotations. Recently, there has been a resurgence of interest in unsupervised parsing, which can be attributed to the combination of two trends in the NLP community: a general trend towards unsupervised training or pre-training, and an emerging trend towards finding or modeling linguistic structures in neural models. In this tutorial, we will introduce to the general audience what unsupervised parsing does and how it can be useful for and beyond syntactic parsing. We will then provide a systematic overview of major classes of approaches to unsupervised parsing, namely generative and discriminative approaches, and analyze their relative strengths and weaknesses. We will cover both decade-old statistical approaches and more recent neural approaches to give the audience a sense of the historical and recent development of the field. We will also discuss emerging research topics such as BERT-based approaches and visually grounded learning.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="tu-etal-2021-unsupervised">
<titleInfo>
<title>Unsupervised Natural Language Parsing (Introductory Tutorial)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kewei</namePart>
<namePart type="family">Tu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yong</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Wenjuan</namePart>
<namePart type="family">Han</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yanpeng</namePart>
<namePart type="family">Zhao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-04</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts</title>
</titleInfo>
<name type="personal">
<namePart type="given">Isabelle</namePart>
<namePart type="family">Augenstein</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ivan</namePart>
<namePart type="family">Habernal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Unsupervised parsing learns a syntactic parser from training sentences without parse tree annotations. Recently, there has been a resurgence of interest in unsupervised parsing, which can be attributed to the combination of two trends in the NLP community: a general trend towards unsupervised training or pre-training, and an emerging trend towards finding or modeling linguistic structures in neural models. In this tutorial, we will introduce to the general audience what unsupervised parsing does and how it can be useful for and beyond syntactic parsing. We will then provide a systematic overview of major classes of approaches to unsupervised parsing, namely generative and discriminative approaches, and analyze their relative strengths and weaknesses. We will cover both decade-old statistical approaches and more recent neural approaches to give the audience a sense of the historical and recent development of the field. We will also discuss emerging research topics such as BERT-based approaches and visually grounded learning.</abstract>
<identifier type="citekey">tu-etal-2021-unsupervised</identifier>
<identifier type="doi">10.18653/v1/2021.eacl-tutorials.1</identifier>
<location>
<url>https://aclanthology.org/2021.eacl-tutorials.1</url>
</location>
<part>
<date>2021-04</date>
<extent unit="page">
<start>1</start>
<end>5</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Unsupervised Natural Language Parsing (Introductory Tutorial)
%A Tu, Kewei
%A Jiang, Yong
%A Han, Wenjuan
%A Zhao, Yanpeng
%Y Augenstein, Isabelle
%Y Habernal, Ivan
%S Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts
%D 2021
%8 April
%I Association for Computational Linguistics
%C Online
%F tu-etal-2021-unsupervised
%X Unsupervised parsing learns a syntactic parser from training sentences without parse tree annotations. Recently, there has been a resurgence of interest in unsupervised parsing, which can be attributed to the combination of two trends in the NLP community: a general trend towards unsupervised training or pre-training, and an emerging trend towards finding or modeling linguistic structures in neural models. In this tutorial, we will introduce to the general audience what unsupervised parsing does and how it can be useful for and beyond syntactic parsing. We will then provide a systematic overview of major classes of approaches to unsupervised parsing, namely generative and discriminative approaches, and analyze their relative strengths and weaknesses. We will cover both decade-old statistical approaches and more recent neural approaches to give the audience a sense of the historical and recent development of the field. We will also discuss emerging research topics such as BERT-based approaches and visually grounded learning.
%R 10.18653/v1/2021.eacl-tutorials.1
%U https://aclanthology.org/2021.eacl-tutorials.1
%U https://doi.org/10.18653/v1/2021.eacl-tutorials.1
%P 1-5
Markdown (Informal)
[Unsupervised Natural Language Parsing (Introductory Tutorial)](https://aclanthology.org/2021.eacl-tutorials.1) (Tu et al., EACL 2021)
ACL
- Kewei Tu, Yong Jiang, Wenjuan Han, and Yanpeng Zhao. 2021. Unsupervised Natural Language Parsing (Introductory Tutorial). In Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Tutorial Abstracts, pages 1–5, Online. Association for Computational Linguistics.