@inproceedings{shin-etal-2013-maximum,
title = "Maximum entropy language modeling for {R}ussian {ASR}",
author = {Shin, Evgeniy and
St{\"u}ker, Sebastian and
Kilgour, Kevin and
F{\"u}gen, Christian and
Waibel, Alex},
editor = "Zhang, Joy Ying",
booktitle = "Proceedings of the 10th International Workshop on Spoken Language Translation: Papers",
month = dec # " 5-6",
year = "2013",
address = "Heidelberg, Germany",
url = "https://aclanthology.org/2013.iwslt-papers.13",
abstract = "Russian is a challenging language for automatic speech recognition systems due to its rich morphology. This rich morphology stems from Russian{'}s highly inflectional nature and the frequent use of pre- and suffixes. Also, Russian has a very free word order, changes in which are used to reflect connotations of the sentences. Dealing with these phenomena is rather difficult for traditional n-gram models. We therefore investigate in this paper the use of a maximum entropy language model for Russian whose features are specifically designed to deal with the inflections in Russian, as well as the loose word order. We combine this with a subword based language model in order to alleviate the problem of large vocabulary sizes necessary for dealing with highly inflecting languages. Applying the maximum entropy language model during re-scoring improves the word error rate of our recognition system by 1.2{\%} absolute, while the use of the sub-word based language model reduces the vocabulary size from 120k to 40k and the OOV rate from 4.8{\%} to 2.1{\%}.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="shin-etal-2013-maximum">
<titleInfo>
<title>Maximum entropy language modeling for Russian ASR</title>
</titleInfo>
<name type="personal">
<namePart type="given">Evgeniy</namePart>
<namePart type="family">Shin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sebastian</namePart>
<namePart type="family">Stüker</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Kevin</namePart>
<namePart type="family">Kilgour</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christian</namePart>
<namePart type="family">Fügen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alex</namePart>
<namePart type="family">Waibel</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2013-dec 5-6</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 10th International Workshop on Spoken Language Translation: Papers</title>
</titleInfo>
<name type="personal">
<namePart type="given">Joy</namePart>
<namePart type="given">Ying</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<place>
<placeTerm type="text">Heidelberg, Germany</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Russian is a challenging language for automatic speech recognition systems due to its rich morphology. This rich morphology stems from Russian’s highly inflectional nature and the frequent use of pre- and suffixes. Also, Russian has a very free word order, changes in which are used to reflect connotations of the sentences. Dealing with these phenomena is rather difficult for traditional n-gram models. We therefore investigate in this paper the use of a maximum entropy language model for Russian whose features are specifically designed to deal with the inflections in Russian, as well as the loose word order. We combine this with a subword based language model in order to alleviate the problem of large vocabulary sizes necessary for dealing with highly inflecting languages. Applying the maximum entropy language model during re-scoring improves the word error rate of our recognition system by 1.2% absolute, while the use of the sub-word based language model reduces the vocabulary size from 120k to 40k and the OOV rate from 4.8% to 2.1%.</abstract>
<identifier type="citekey">shin-etal-2013-maximum</identifier>
<location>
<url>https://aclanthology.org/2013.iwslt-papers.13</url>
</location>
<part>
<date>2013-dec 5-6</date>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Maximum entropy language modeling for Russian ASR
%A Shin, Evgeniy
%A Stüker, Sebastian
%A Kilgour, Kevin
%A Fügen, Christian
%A Waibel, Alex
%Y Zhang, Joy Ying
%S Proceedings of the 10th International Workshop on Spoken Language Translation: Papers
%D 2013
%8 dec 5 6
%C Heidelberg, Germany
%F shin-etal-2013-maximum
%X Russian is a challenging language for automatic speech recognition systems due to its rich morphology. This rich morphology stems from Russian’s highly inflectional nature and the frequent use of pre- and suffixes. Also, Russian has a very free word order, changes in which are used to reflect connotations of the sentences. Dealing with these phenomena is rather difficult for traditional n-gram models. We therefore investigate in this paper the use of a maximum entropy language model for Russian whose features are specifically designed to deal with the inflections in Russian, as well as the loose word order. We combine this with a subword based language model in order to alleviate the problem of large vocabulary sizes necessary for dealing with highly inflecting languages. Applying the maximum entropy language model during re-scoring improves the word error rate of our recognition system by 1.2% absolute, while the use of the sub-word based language model reduces the vocabulary size from 120k to 40k and the OOV rate from 4.8% to 2.1%.
%U https://aclanthology.org/2013.iwslt-papers.13
Markdown (Informal)
[Maximum entropy language modeling for Russian ASR](https://aclanthology.org/2013.iwslt-papers.13) (Shin et al., IWSLT 2013)
ACL
- Evgeniy Shin, Sebastian Stüker, Kevin Kilgour, Christian Fügen, and Alex Waibel. 2013. Maximum entropy language modeling for Russian ASR. In Proceedings of the 10th International Workshop on Spoken Language Translation: Papers, Heidelberg, Germany.