@inproceedings{cao-etal-2024-deep,
title = "Deep Learning Meets Egyptology: a Hieroglyphic Transformer for Translating {A}ncient {E}gyptian",
author = "De Cao, Mattia and
De Cao, Nicola and
Colonna, Angelo and
Lenci, Alessandro",
editor = "Pavlopoulos, John and
Sommerschield, Thea and
Assael, Yannis and
Gordin, Shai and
Cho, Kyunghyun and
Passarotti, Marco and
Sprugnoli, Rachele and
Liu, Yudong and
Li, Bin and
Anderson, Adam",
booktitle = "Proceedings of the 1st Workshop on Machine Learning for Ancient Languages (ML4AL 2024)",
month = aug,
year = "2024",
address = "Hybrid in Bangkok, Thailand and online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.ml4al-1.9/",
doi = "10.18653/v1/2024.ml4al-1.9",
pages = "71--86",
abstract = "This work explores the potential of Transformer models focusing on the translation of ancient Egyptian hieroglyphs. We present a novel Hieroglyphic Transformer model, built upon the powerful M2M-100 multilingual translation framework and trained on a dataset we customised from the Thesaurus Linguae Aegyptiae database. Our experiments demonstrate promising results, with the model achieving significant accuracy in translating hieroglyphics into both German and English. This work holds significant implications for Egyptology, potentially accelerating the translation process and unlocking new research approaches."
}
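The abstract above describes building the Hieroglyphic Transformer on top of the M2M-100 multilingual translation framework, fine-tuned on a corpus derived from the Thesaurus Linguae Aegyptiae. As a rough illustration of that starting point only, the sketch below loads a pretrained M2M-100 checkpoint with Hugging Face Transformers and runs a single translation request. The checkpoint size, the placeholder source-language code, and the transliterated example sentence are assumptions for illustration, not the configuration used in the paper.

```python
# Illustrative sketch only: queries an off-the-shelf M2M-100 checkpoint via
# Hugging Face Transformers. The paper's actual fine-tuning setup, the
# source-side encoding of hieroglyphs/transliteration, and all hyperparameters
# are not specified here and are assumptions.
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

model_name = "facebook/m2m100_418M"  # assumed checkpoint size
tokenizer = M2M100Tokenizer.from_pretrained(model_name)
model = M2M100ForConditionalGeneration.from_pretrained(model_name)

# Hypothetical example: a transliterated Egyptian sentence decoded into
# English. M2M-100 has no Ancient Egyptian language code, so the source
# language set below is only a placeholder slot.
tokenizer.src_lang = "de"  # placeholder; not a real Ancient Egyptian code
source = "jw jr.n=j m Dd.t.n=f"  # hypothetical transliteration
inputs = tokenizer(source, return_tensors="pt")
generated = model.generate(
    **inputs, forced_bos_token_id=tokenizer.get_lang_id("en")
)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```

Reproducing the paper's setup would additionally require assembling a parallel corpus from the Thesaurus Linguae Aegyptiae and fine-tuning the model on it, neither of which is shown in this sketch.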
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="cao-etal-2024-deep">
<titleInfo>
<title>Deep Learning Meets Egyptology: a Hieroglyphic Transformer for Translating Ancient Egyptian</title>
</titleInfo>
<name type="personal">
<namePart type="given">Mattia</namePart>
<namePart type="family">De Cao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nicola</namePart>
<namePart type="family">De Cao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Angelo</namePart>
<namePart type="family">Colonna</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alessandro</namePart>
<namePart type="family">Lenci</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2024-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 1st Workshop on Machine Learning for Ancient Languages (ML4AL 2024)</title>
</titleInfo>
<name type="personal">
<namePart type="given">John</namePart>
<namePart type="family">Pavlopoulos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Thea</namePart>
<namePart type="family">Sommerschield</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yannis</namePart>
<namePart type="family">Assael</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shai</namePart>
<namePart type="family">Gordin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Kyunghyun</namePart>
<namePart type="family">Cho</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marco</namePart>
<namePart type="family">Passarotti</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Rachele</namePart>
<namePart type="family">Sprugnoli</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yudong</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Bin</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Adam</namePart>
<namePart type="family">Anderson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Hybrid in Bangkok, Thailand and online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This work explores the potential of Transformer models focusing on the translation of ancient Egyptian hieroglyphs. We present a novel Hieroglyphic Transformer model, built upon the powerful M2M-100 multilingual translation framework and trained on a dataset we customised from the Thesaurus Linguae Aegyptiae database. Our experiments demonstrate promising results, with the model achieving significant accuracy in translating hieroglyphics into both German and English. This work holds significant implications for Egyptology, potentially accelerating the translation process and unlocking new research approaches.</abstract>
<identifier type="citekey">cao-etal-2024-deep</identifier>
<identifier type="doi">10.18653/v1/2024.ml4al-1.9</identifier>
<location>
<url>https://aclanthology.org/2024.ml4al-1.9/</url>
</location>
<part>
<date>2024-08</date>
<extent unit="page">
<start>71</start>
<end>86</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Deep Learning Meets Egyptology: a Hieroglyphic Transformer for Translating Ancient Egyptian
%A De Cao, Mattia
%A De Cao, Nicola
%A Colonna, Angelo
%A Lenci, Alessandro
%Y Pavlopoulos, John
%Y Sommerschield, Thea
%Y Assael, Yannis
%Y Gordin, Shai
%Y Cho, Kyunghyun
%Y Passarotti, Marco
%Y Sprugnoli, Rachele
%Y Liu, Yudong
%Y Li, Bin
%Y Anderson, Adam
%S Proceedings of the 1st Workshop on Machine Learning for Ancient Languages (ML4AL 2024)
%D 2024
%8 August
%I Association for Computational Linguistics
%C Hybrid in Bangkok, Thailand and online
%F cao-etal-2024-deep
%X This work explores the potential of Transformer models focusing on the translation of ancient Egyptian hieroglyphs. We present a novel Hieroglyphic Transformer model, built upon the powerful M2M-100 multilingual translation framework and trained on a dataset we customised from the Thesaurus Linguae Aegyptiae database. Our experiments demonstrate promising results, with the model achieving significant accuracy in translating hieroglyphics into both German and English. This work holds significant implications for Egyptology, potentially accelerating the translation process and unlocking new research approaches.
%R 10.18653/v1/2024.ml4al-1.9
%U https://aclanthology.org/2024.ml4al-1.9/
%U https://doi.org/10.18653/v1/2024.ml4al-1.9
%P 71-86
Markdown (Informal)
[Deep Learning Meets Egyptology: a Hieroglyphic Transformer for Translating Ancient Egyptian](https://aclanthology.org/2024.ml4al-1.9/) (De Cao et al., ML4AL 2024)
ACL
Mattia De Cao, Nicola De Cao, Angelo Colonna, and Alessandro Lenci. 2024. [Deep Learning Meets Egyptology: a Hieroglyphic Transformer for Translating Ancient Egyptian](https://aclanthology.org/2024.ml4al-1.9/). In *Proceedings of the 1st Workshop on Machine Learning for Ancient Languages (ML4AL 2024)*, pages 71–86, Hybrid in Bangkok, Thailand and online. Association for Computational Linguistics.