@inproceedings{hayashi-etal-2009-structural,
    title     = "Structural support vector machines for log-linear approach in statistical machine translation",
    author    = "Hayashi, Katsuhiko and
      Watanabe, Taro and
      Tsukada, Hajime and
      Isozaki, Hideki",
    booktitle = "Proceedings of the 6th International Workshop on Spoken Language Translation: Papers",
    month     = dec # " 1--2",
    year      = "2009",
    address   = "Tokyo, Japan",
    url       = "https://aclanthology.org/2009.iwslt-papers.3/",
    pages     = "144--151",
    abstract  = "Minimum error rate training (MERT) is a widely used learning method for statistical machine translation. In this paper, we present a SVM-based training method to enhance generalization ability. We extend MERT optimization by maximizing the margin between the reference and incorrect translations under the L2-norm prior to avoid overfitting problem. Translation accuracy obtained by our proposed methods is more stable in various conditions than that obtained by MERT. Our experimental results on the French-English WMT08 shared task show that degrade of our proposed methods is smaller than that of MERT in case of small training data or out-of-domain test data."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="hayashi-etal-2009-structural">
<titleInfo>
<title>Structural support vector machines for log-linear approach in statistical machine translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Katsuhiko</namePart>
<namePart type="family">Hayashi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Taro</namePart>
<namePart type="family">Watanabe</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hajime</namePart>
<namePart type="family">Tsukada</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hideki</namePart>
<namePart type="family">Isozaki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>December 1-2, 2009</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 6th International Workshop on Spoken Language Translation: Papers</title>
</titleInfo>
<originInfo>
<place>
<placeTerm type="text">Tokyo, Japan</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Minimum error rate training (MERT) is a widely used learning method for statistical machine translation. In this paper, we present a SVM-based training method to enhance generalization ability. We extend MERT optimization by maximizing the margin between the reference and incorrect translations under the L2-norm prior to avoid overfitting problem. Translation accuracy obtained by our proposed methods is more stable in various conditions than that obtained by MERT. Our experimental results on the French-English WMT08 shared task show that degrade of our proposed methods is smaller than that of MERT in case of small training data or out-of-domain test data.</abstract>
<identifier type="citekey">hayashi-etal-2009-structural</identifier>
<location>
<url>https://aclanthology.org/2009.iwslt-papers.3/</url>
</location>
<part>
<date>December 1-2, 2009</date>
<extent unit="page">
<start>144</start>
<end>151</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Structural support vector machines for log-linear approach in statistical machine translation
%A Hayashi, Katsuhiko
%A Watanabe, Taro
%A Tsukada, Hajime
%A Isozaki, Hideki
%S Proceedings of the 6th International Workshop on Spoken Language Translation: Papers
%D 2009
%8 December 1-2
%C Tokyo, Japan
%F hayashi-etal-2009-structural
%X Minimum error rate training (MERT) is a widely used learning method for statistical machine translation. In this paper, we present a SVM-based training method to enhance generalization ability. We extend MERT optimization by maximizing the margin between the reference and incorrect translations under the L2-norm prior to avoid overfitting problem. Translation accuracy obtained by our proposed methods is more stable in various conditions than that obtained by MERT. Our experimental results on the French-English WMT08 shared task show that degrade of our proposed methods is smaller than that of MERT in case of small training data or out-of-domain test data.
%U https://aclanthology.org/2009.iwslt-papers.3/
%P 144-151
Markdown (Informal)
[Structural support vector machines for log-linear approach in statistical machine translation](https://aclanthology.org/2009.iwslt-papers.3/) (Hayashi et al., IWSLT 2009)
ACL