@inproceedings{goodwin-demner-fushman-2020-enhancing,
title = "Enhancing Question Answering by Injecting Ontological Knowledge through Regularization",
author = "Goodwin, Travis and
Demner-Fushman, Dina",
editor = "Agirre, Eneko and
Apidianaki, Marianna and
Vuli{\'c}, Ivan",
booktitle = "Proceedings of Deep Learning Inside Out (DeeLIO): The First Workshop on Knowledge Extraction and Integration for Deep Learning Architectures",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.deelio-1.7/",
doi = "10.18653/v1/2020.deelio-1.7",
pages = "56--63",
abstract = "Deep neural networks have demonstrated high performance on many natural language processing (NLP) tasks that can be answered directly from text, and have struggled to solve NLP tasks requiring external (e.g., world) knowledge. In this paper, we present OSCR (Ontology-based Semantic Composition Regularization), a method for injecting task-agnostic knowledge from an Ontology or knowledge graph into a neural network during pre-training. We evaluated the performance of BERT pre-trained on Wikipedia with and without OSCR by measuring the performance when fine-tuning on two question answering tasks involving world knowledge and causal reasoning and one requiring domain (healthcare) knowledge and obtained 33.3{\%}, 18.6{\%}, and 4{\%} improved accuracy compared to pre-training BERT without OSCR."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="goodwin-demner-fushman-2020-enhancing">
<titleInfo>
<title>Enhancing Question Answering by Injecting Ontological Knowledge through Regularization</title>
</titleInfo>
<name type="personal">
<namePart type="given">Travis</namePart>
<namePart type="family">Goodwin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dina</namePart>
<namePart type="family">Demner-Fushman</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of Deep Learning Inside Out (DeeLIO): The First Workshop on Knowledge Extraction and Integration for Deep Learning Architectures</title>
</titleInfo>
<name type="personal">
<namePart type="given">Eneko</namePart>
<namePart type="family">Agirre</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marianna</namePart>
<namePart type="family">Apidianaki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ivan</namePart>
<namePart type="family">Vulić</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Deep neural networks have demonstrated high performance on many natural language processing (NLP) tasks that can be answered directly from text, and have struggled to solve NLP tasks requiring external (e.g., world) knowledge. In this paper, we present OSCR (Ontology-based Semantic Composition Regularization), a method for injecting task-agnostic knowledge from an Ontology or knowledge graph into a neural network during pre-training. We evaluated the performance of BERT pre-trained on Wikipedia with and without OSCR by measuring the performance when fine-tuning on two question answering tasks involving world knowledge and causal reasoning and one requiring domain (healthcare) knowledge and obtained 33.3%, 18.6%, and 4% improved accuracy compared to pre-training BERT without OSCR.</abstract>
<identifier type="citekey">goodwin-demner-fushman-2020-enhancing</identifier>
<identifier type="doi">10.18653/v1/2020.deelio-1.7</identifier>
<location>
<url>https://aclanthology.org/2020.deelio-1.7/</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>56</start>
<end>63</end>
</extent>
</part>
</mods>
</modsCollection>