BibTeX
@inproceedings{cho-etal-2020-leveraging,
title = "Leveraging {W}ord{N}et Paths for Neural Hypernym Prediction",
author = "Cho, Yejin and
Rodriguez, Juan Diego and
Gao, Yifan and
Erk, Katrin",
editor = "Scott, Donia and
Bel, Nuria and
Zong, Chengqing",
booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "International Committee on Computational Linguistics",
url = "https://aclanthology.org/2020.coling-main.268",
doi = "10.18653/v1/2020.coling-main.268",
pages = "3007--3018",
abstract = "We formulate the problem of hypernym prediction as a sequence generation task, where the sequences are taxonomy paths in WordNet. Our experiments with encoder-decoder models show that training to generate taxonomy paths can improve the performance of direct hypernym prediction. As a simple but powerful model, the hypo2path model achieves state-of-the-art performance, outperforming the best benchmark by 4.11 points in hit-at-one (H@1).",
}
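The abstract above frames hypernym prediction as generating a WordNet taxonomy path. As an informal illustration only (not the authors' hypo2path implementation), the short Python sketch below uses NLTK to pull one root-to-word hypernym path out of WordNet and serialize it, which is the kind of target sequence an encoder-decoder model could be trained to generate; the function name and the arrow-joined output format are assumptions for this sketch.

# Minimal sketch, assuming NLTK is installed and the WordNet data has been
# downloaded (pip install nltk; python -c "import nltk; nltk.download('wordnet')").
# This is illustrative only and is not the paper's hypo2path code.
from nltk.corpus import wordnet as wn

def taxonomy_path_sequence(synset_name):
    """Return one root-to-synset hypernym path as a list of synset names."""
    synset = wn.synset(synset_name)
    # hypernym_paths() yields every path from a WordNet root down to the synset;
    # the first path is taken here purely for illustration.
    path = synset.hypernym_paths()[0]
    return [s.name() for s in path]

if __name__ == "__main__":
    # Prints something like: entity.n.01 -> physical_entity.n.01 -> ... -> dog.n.01
    print(" -> ".join(taxonomy_path_sequence("dog.n.01")))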
MODS XML
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="cho-etal-2020-leveraging">
<titleInfo>
<title>Leveraging WordNet Paths for Neural Hypernym Prediction</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yejin</namePart>
<namePart type="family">Cho</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Juan</namePart>
<namePart type="given">Diego</namePart>
<namePart type="family">Rodriguez</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yifan</namePart>
<namePart type="family">Gao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Katrin</namePart>
<namePart type="family">Erk</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 28th International Conference on Computational Linguistics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Donia</namePart>
<namePart type="family">Scott</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nuria</namePart>
<namePart type="family">Bel</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Chengqing</namePart>
<namePart type="family">Zong</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>International Committee on Computational Linguistics</publisher>
<place>
<placeTerm type="text">Barcelona, Spain (Online)</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We formulate the problem of hypernym prediction as a sequence generation task, where the sequences are taxonomy paths in WordNet. Our experiments with encoder-decoder models show that training to generate taxonomy paths can improve the performance of direct hypernym prediction. As a simple but powerful model, the hypo2path model achieves state-of-the-art performance, outperforming the best benchmark by 4.11 points in hit-at-one (H@1).</abstract>
<identifier type="citekey">cho-etal-2020-leveraging</identifier>
<identifier type="doi">10.18653/v1/2020.coling-main.268</identifier>
<location>
<url>https://aclanthology.org/2020.coling-main.268</url>
</location>
<part>
<date>2020-12</date>
<extent unit="page">
<start>3007</start>
<end>3018</end>
</extent>
</part>
</mods>
</modsCollection>
Endnote
%0 Conference Proceedings
%T Leveraging WordNet Paths for Neural Hypernym Prediction
%A Cho, Yejin
%A Rodriguez, Juan Diego
%A Gao, Yifan
%A Erk, Katrin
%Y Scott, Donia
%Y Bel, Nuria
%Y Zong, Chengqing
%S Proceedings of the 28th International Conference on Computational Linguistics
%D 2020
%8 December
%I International Committee on Computational Linguistics
%C Barcelona, Spain (Online)
%F cho-etal-2020-leveraging
%X We formulate the problem of hypernym prediction as a sequence generation task, where the sequences are taxonomy paths in WordNet. Our experiments with encoder-decoder models show that training to generate taxonomy paths can improve the performance of direct hypernym prediction. As a simple but powerful model, the hypo2path model achieves state-of-the-art performance, outperforming the best benchmark by 4.11 points in hit-at-one (H@1).
%R 10.18653/v1/2020.coling-main.268
%U https://aclanthology.org/2020.coling-main.268
%U https://doi.org/10.18653/v1/2020.coling-main.268
%P 3007-3018
Markdown (Informal)
[Leveraging WordNet Paths for Neural Hypernym Prediction](https://aclanthology.org/2020.coling-main.268) (Cho et al., COLING 2020)
ACL
Yejin Cho, Juan Diego Rodriguez, Yifan Gao, and Katrin Erk. 2020. Leveraging WordNet Paths for Neural Hypernym Prediction. In Proceedings of the 28th International Conference on Computational Linguistics, pages 3007–3018, Barcelona, Spain (Online). International Committee on Computational Linguistics.