@article{wysocki-etal-2023-transformers,
    title = "Transformers and the Representation of Biomedical Background Knowledge",
    author = "Wysocki, Oskar and
      Zhou, Zili and
      O{'}Regan, Paul and
      Ferreira, Deborah and
      Wysocka, Magdalena and
      Landers, D{\'o}nal and
      Freitas, Andr{\'e}",
    journal = "Computational Linguistics",
    volume = "49",
    number = "1",
    month = mar,
    year = "2023",
    address = "Cambridge, MA",
    publisher = "MIT Press",
    url = "https://aclanthology.org/2023.cl-1.2",
    doi = "10.1162/coli_a_00462",
    pages = "73--115",
    abstract = "Specialized transformers-based models (such as BioBERT and BioMegatron) are adapted for the biomedical domain based on publicly available biomedical corpora. As such, they have the potential to encode large-scale biological knowledge. We investigate the encoding and representation of biological knowledge in these models, and its potential utility to support inference in cancer precision medicine{---}namely, the interpretation of the clinical significance of genomic alterations. We compare the performance of different transformer baselines; we use probing to determine the consistency of encodings for distinct entities; and we use clustering methods to compare and contrast the internal properties of the embeddings for genes, variants, drugs, and diseases. We show that these models do indeed encode biological knowledge, although some of this is lost in fine-tuning for specific tasks. Finally, we analyze how the models behave with regard to biases and imbalances in the dataset.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="wysocki-etal-2023-transformers">
    <titleInfo>
      <title>Transformers and the Representation of Biomedical Background Knowledge</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Oskar</namePart>
      <namePart type="family">Wysocki</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Zili</namePart>
      <namePart type="family">Zhou</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Paul</namePart>
      <namePart type="family">O’Regan</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Deborah</namePart>
      <namePart type="family">Ferreira</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Magdalena</namePart>
      <namePart type="family">Wysocka</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Dónal</namePart>
      <namePart type="family">Landers</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">André</namePart>
      <namePart type="family">Freitas</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2023-03</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <genre authority="bibutilsgt">journal article</genre>
    <relatedItem type="host">
      <titleInfo>
        <title>Computational Linguistics</title>
      </titleInfo>
      <originInfo>
        <issuance>continuing</issuance>
        <publisher>MIT Press</publisher>
        <place>
          <placeTerm type="text">Cambridge, MA</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">periodical</genre>
      <genre authority="bibutilsgt">academic journal</genre>
    </relatedItem>
    <abstract>Specialized transformers-based models (such as BioBERT and BioMegatron) are adapted for the biomedical domain based on publicly available biomedical corpora. As such, they have the potential to encode large-scale biological knowledge. We investigate the encoding and representation of biological knowledge in these models, and its potential utility to support inference in cancer precision medicine—namely, the interpretation of the clinical significance of genomic alterations. We compare the performance of different transformer baselines; we use probing to determine the consistency of encodings for distinct entities; and we use clustering methods to compare and contrast the internal properties of the embeddings for genes, variants, drugs, and diseases. We show that these models do indeed encode biological knowledge, although some of this is lost in fine-tuning for specific tasks. Finally, we analyze how the models behave with regard to biases and imbalances in the dataset.</abstract>
    <identifier type="citekey">wysocki-etal-2023-transformers</identifier>
    <identifier type="doi">10.1162/coli_a_00462</identifier>
    <location>
      <url>https://aclanthology.org/2023.cl-1.2</url>
    </location>
    <part>
      <date>2023-03</date>
      <detail type="volume"><number>49</number></detail>
      <detail type="issue"><number>1</number></detail>
      <extent unit="page">
        <start>73</start>
        <end>115</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Journal Article
%T Transformers and the Representation of Biomedical Background Knowledge
%A Wysocki, Oskar
%A Zhou, Zili
%A O’Regan, Paul
%A Ferreira, Deborah
%A Wysocka, Magdalena
%A Landers, Dónal
%A Freitas, André
%J Computational Linguistics
%D 2023
%8 March
%V 49
%N 1
%I MIT Press
%C Cambridge, MA
%F wysocki-etal-2023-transformers
%X Specialized transformers-based models (such as BioBERT and BioMegatron) are adapted for the biomedical domain based on publicly available biomedical corpora. As such, they have the potential to encode large-scale biological knowledge. We investigate the encoding and representation of biological knowledge in these models, and its potential utility to support inference in cancer precision medicine—namely, the interpretation of the clinical significance of genomic alterations. We compare the performance of different transformer baselines; we use probing to determine the consistency of encodings for distinct entities; and we use clustering methods to compare and contrast the internal properties of the embeddings for genes, variants, drugs, and diseases. We show that these models do indeed encode biological knowledge, although some of this is lost in fine-tuning for specific tasks. Finally, we analyze how the models behave with regard to biases and imbalances in the dataset.
%R 10.1162/coli_a_00462
%U https://aclanthology.org/2023.cl-1.2
%U https://doi.org/10.1162/coli_a_00462
%P 73-115
Markdown (Informal)
[Transformers and the Representation of Biomedical Background Knowledge](https://aclanthology.org/2023.cl-1.2) (Wysocki et al., CL 2023)
ACL
Oskar Wysocki, Zili Zhou, Paul O’Regan, Deborah Ferreira, Magdalena Wysocka, Dónal Landers, and André Freitas. 2023. Transformers and the Representation of Biomedical Background Knowledge. Computational Linguistics, 49(1):73–115.