@inproceedings{meekhof-clements-2000-l,
title = "{L}{\&}{H} lexicography toolkit for machine translation",
author = "Meekhof, Timothy and
Clements, David",
editor = "White, John S.",
booktitle = "Proceedings of the Fourth Conference of the Association for Machine Translation in the Americas: System Descriptions",
month = oct # " 10-14",
year = "2000",
address = "Cuernavaca, Mexico",
publisher = "Springer",
url = "https://link.springer.com/chapter/10.1007/3-540-39965-8_24",
pages = "213--218",
abstract = "One of the most important components of any machine translation system is the translation lexicon. The size and quality of the lexicon, as well as the coverage of the lexicon for a particular use, greatly influence the applicability of machine translation for a user. The high cost of lexicon development limits the extent to which even mature machine translation vendors can expand and specialize their lexicons, and frequently prevents users from building extensive lexicons at all. To address the high cost of lexicography for machine translation, L{\&}H is building a Lexicography Toolkit that includes tools that can significantly improve the process of creating custom lexicons. The toolkit is based on the concept of using automatic methods of data acquisition, using text corpora, to generate lexicon entries. Of course, lexicon entries must be accurate, so the work of the toolkit must be checked by human experts at several stages. However, this checking mostly consists of removing erroneous results, rather than adding data and entire entries. This article will explore how the Lexicography Toolkit would be used to create a lexicon that is specific to the user{'}s domain.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="meekhof-clements-2000-l">
<titleInfo>
<title>L&amp;H lexicography toolkit for machine translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Timothy</namePart>
<namePart type="family">Meekhof</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">David</namePart>
<namePart type="family">Clements</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>October 10-14, 2000</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fourth Conference of the Association for Machine Translation in the Americas: System Descriptions</title>
</titleInfo>
<name type="personal">
<namePart type="given">John</namePart>
<namePart type="given">S</namePart>
<namePart type="family">White</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Springer</publisher>
<place>
<placeTerm type="text">Cuernavaca, Mexico</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>One of the most important components of any machine translation system is the translation lexicon. The size and quality of the lexicon, as well as the coverage of the lexicon for a particular use, greatly influence the applicability of machine translation for a user. The high cost of lexicon development limits the extent to which even mature machine translation vendors can expand and specialize their lexicons, and frequently prevents users from building extensive lexicons at all. To address the high cost of lexicography for machine translation, L&amp;H is building a Lexicography Toolkit that includes tools that can significantly improve the process of creating custom lexicons. The toolkit is based on the concept of using automatic methods of data acquisition, using text corpora, to generate lexicon entries. Of course, lexicon entries must be accurate, so the work of the toolkit must be checked by human experts at several stages. However, this checking mostly consists of removing erroneous results, rather than adding data and entire entries. This article will explore how the Lexicography Toolkit would be used to create a lexicon that is specific to the user’s domain.</abstract>
<identifier type="citekey">meekhof-clements-2000-l</identifier>
<location>
<url>https://link.springer.com/chapter/10.1007/3-540-39965-8_24</url>
</location>
<part>
<date>October 10-14, 2000</date>
<extent unit="page">
<start>213</start>
<end>218</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T L&H lexicography toolkit for machine translation
%A Meekhof, Timothy
%A Clements, David
%Y White, John S.
%S Proceedings of the Fourth Conference of the Association for Machine Translation in the Americas: System Descriptions
%D 2000
%8 October 10-14
%I Springer
%C Cuernavaca, Mexico
%F meekhof-clements-2000-l
%X One of the most important components of any machine translation system is the translation lexicon. The size and quality of the lexicon, as well as the coverage of the lexicon for a particular use, greatly influence the applicability of machine translation for a user. The high cost of lexicon development limits the extent to which even mature machine translation vendors can expand and specialize their lexicons, and frequently prevents users from building extensive lexicons at all. To address the high cost of lexicography for machine translation, L&H is building a Lexicography Toolkit that includes tools that can significantly improve the process of creating custom lexicons. The toolkit is based on the concept of using automatic methods of data acquisition, using text corpora, to generate lexicon entries. Of course, lexicon entries must be accurate, so the work of the toolkit must be checked by human experts at several stages. However, this checking mostly consists of removing erroneous results, rather than adding data and entire entries. This article will explore how the Lexicography Toolkit would be used to create a lexicon that is specific to the user’s domain.
%U https://link.springer.com/chapter/10.1007/3-540-39965-8_24
%P 213-218
Markdown (Informal)
[L&H lexicography toolkit for machine translation](https://link.springer.com/chapter/10.1007/3-540-39965-8_24) (Meekhof & Clements, AMTA 2000)
ACL
- Timothy Meekhof and David Clements. 2000. L&H lexicography toolkit for machine translation. In Proceedings of the Fourth Conference of the Association for Machine Translation in the Americas: System Descriptions, pages 213–218, Cuernavaca, Mexico. Springer.