@inproceedings{peng-etal-2022-guiding,
title = "Guiding Neural Story Generation with Reader Models",
author = "Peng, Xiangyu and
Xie, Kaige and
Alabdulkarim, Amal and
Kayam, Harshith and
Dani, Samihan and
Riedl, Mark",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2022",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.findings-emnlp.526",
doi = "10.18653/v1/2022.findings-emnlp.526",
pages = "7087--7111",
abstract = "Automated storytelling has long captured the attention of researchers for the ubiquity of narratives in everyday life. However, it is challenging to maintain coherence and stay on-topictoward a specific ending when generating narratives with neural language models. In this paper, we introduce Story generation with ReaderModels (StoRM), a framework in which areader model is used to reason about the storyshould progress. A reader model infers whata human reader believes about the concepts,entities, and relations about the fictional storyworld. We show how an explicit reader modelrepresented as a knowledge graph affords the storycoherence and provides controllability in theform of achieving a given story world stategoal. Experiments show that our model produces significantly more coherent and on-topicstories, outperforming baselines in dimensionsincluding plot plausibility and staying on topic",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="peng-etal-2022-guiding">
<titleInfo>
<title>Guiding Neural Story Generation with Reader Models</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xiangyu</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Kaige</namePart>
<namePart type="family">Xie</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amal</namePart>
<namePart type="family">Alabdulkarim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Harshith</namePart>
<namePart type="family">Kayam</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Samihan</namePart>
<namePart type="family">Dani</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mark</namePart>
<namePart type="family">Riedl</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2022-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2022</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yoav</namePart>
<namePart type="family">Goldberg</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zornitsa</namePart>
<namePart type="family">Kozareva</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yue</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Abu Dhabi, United Arab Emirates</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Automated storytelling has long captured the attention of researchers for the ubiquity of narratives in everyday life. However, it is challenging to maintain coherence and stay on-topic toward a specific ending when generating narratives with neural language models. In this paper, we introduce Story generation with Reader Models (StoRM), a framework in which a reader model is used to reason about how the story should progress. A reader model infers what a human reader believes about the concepts, entities, and relations about the fictional story world. We show how an explicit reader model represented as a knowledge graph affords the story coherence and provides controllability in the form of achieving a given story world state goal. Experiments show that our model produces significantly more coherent and on-topic stories, outperforming baselines in dimensions including plot plausibility and staying on topic.</abstract>
<identifier type="citekey">peng-etal-2022-guiding</identifier>
<identifier type="doi">10.18653/v1/2022.findings-emnlp.526</identifier>
<location>
<url>https://aclanthology.org/2022.findings-emnlp.526</url>
</location>
<part>
<date>2022-12</date>
<extent unit="page">
<start>7087</start>
<end>7111</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Guiding Neural Story Generation with Reader Models
%A Peng, Xiangyu
%A Xie, Kaige
%A Alabdulkarim, Amal
%A Kayam, Harshith
%A Dani, Samihan
%A Riedl, Mark
%Y Goldberg, Yoav
%Y Kozareva, Zornitsa
%Y Zhang, Yue
%S Findings of the Association for Computational Linguistics: EMNLP 2022
%D 2022
%8 December
%I Association for Computational Linguistics
%C Abu Dhabi, United Arab Emirates
%F peng-etal-2022-guiding
%X Automated storytelling has long captured the attention of researchers for the ubiquity of narratives in everyday life. However, it is challenging to maintain coherence and stay on-topic toward a specific ending when generating narratives with neural language models. In this paper, we introduce Story generation with Reader Models (StoRM), a framework in which a reader model is used to reason about how the story should progress. A reader model infers what a human reader believes about the concepts, entities, and relations about the fictional story world. We show how an explicit reader model represented as a knowledge graph affords the story coherence and provides controllability in the form of achieving a given story world state goal. Experiments show that our model produces significantly more coherent and on-topic stories, outperforming baselines in dimensions including plot plausibility and staying on topic.
%R 10.18653/v1/2022.findings-emnlp.526
%U https://aclanthology.org/2022.findings-emnlp.526
%U https://doi.org/10.18653/v1/2022.findings-emnlp.526
%P 7087-7111
Markdown (Informal)
[Guiding Neural Story Generation with Reader Models](https://aclanthology.org/2022.findings-emnlp.526) (Peng et al., Findings 2022)
ACL
Xiangyu Peng, Kaige Xie, Amal Alabdulkarim, Harshith Kayam, Samihan Dani, and Mark Riedl. 2022. Guiding Neural Story Generation with Reader Models. In Findings of the Association for Computational Linguistics: EMNLP 2022, pages 7087–7111, Abu Dhabi, United Arab Emirates. Association for Computational Linguistics.