@inproceedings{furrer-etal-2019-uzh,
title = "{UZH}@{CRAFT}-{ST}: a Sequence-labeling Approach to Concept Recognition",
author = "Furrer, Lenz and
Cornelius, Joseph and
Rinaldi, Fabio",
  editor = "Kim, Jin-Dong and
      N{\'e}dellec, Claire and
      Bossy, Robert and
      Del{\'e}ger, Louise",
booktitle = "Proceedings of the 5th Workshop on BioNLP Open Shared Tasks",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D19-5726",
doi = "10.18653/v1/D19-5726",
pages = "185--195",
abstract = "As our submission to the CRAFT shared task 2019, we present two neural approaches to concept recognition. We propose two different systems for joint named entity recognition (NER) and normalization (NEN), both of which model the task as a sequence labeling problem. Our first system is a BiLSTM network with two separate outputs for NER and NEN trained from scratch, whereas the second system is an instance of BioBERT fine-tuned on the concept-recognition task. We exploit two strategies for extending concept coverage, ontology pretraining and backoff with a dictionary lookup. Our results show that the backoff strategy effectively tackles the problem of unseen concepts, addressing a major limitation of the chosen design. In the cross-system comparison, BioBERT proves to be a strong basis for creating a concept-recognition system, although some entity types are predicted more accurately by the BiLSTM-based system.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="furrer-etal-2019-uzh">
<titleInfo>
<title>UZH@CRAFT-ST: a Sequence-labeling Approach to Concept Recognition</title>
</titleInfo>
<name type="personal">
<namePart type="given">Lenz</namePart>
<namePart type="family">Furrer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Joseph</namePart>
<namePart type="family">Cornelius</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Fabio</namePart>
<namePart type="family">Rinaldi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 5th Workshop on BioNLP Open Shared Tasks</title>
</titleInfo>
<name type="personal">
        <namePart type="given">Jin-Dong</namePart>
        <namePart type="family">Kim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
        <namePart type="given">Claire</namePart>
        <namePart type="family">Nédellec</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
        <namePart type="given">Robert</namePart>
        <namePart type="family">Bossy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
        <namePart type="given">Louise</namePart>
        <namePart type="family">Deléger</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Hong Kong, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>As our submission to the CRAFT shared task 2019, we present two neural approaches to concept recognition. We propose two different systems for joint named entity recognition (NER) and normalization (NEN), both of which model the task as a sequence labeling problem. Our first system is a BiLSTM network with two separate outputs for NER and NEN trained from scratch, whereas the second system is an instance of BioBERT fine-tuned on the concept-recognition task. We exploit two strategies for extending concept coverage, ontology pretraining and backoff with a dictionary lookup. Our results show that the backoff strategy effectively tackles the problem of unseen concepts, addressing a major limitation of the chosen design. In the cross-system comparison, BioBERT proves to be a strong basis for creating a concept-recognition system, although some entity types are predicted more accurately by the BiLSTM-based system.</abstract>
<identifier type="citekey">furrer-etal-2019-uzh</identifier>
<identifier type="doi">10.18653/v1/D19-5726</identifier>
<location>
<url>https://aclanthology.org/D19-5726</url>
</location>
<part>
<date>2019-11</date>
<extent unit="page">
<start>185</start>
<end>195</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T UZH@CRAFT-ST: a Sequence-labeling Approach to Concept Recognition
%A Furrer, Lenz
%A Cornelius, Joseph
%A Rinaldi, Fabio
%Y Kim, Jin-Dong
%Y Nédellec, Claire
%Y Bossy, Robert
%Y Deléger, Louise
%S Proceedings of the 5th Workshop on BioNLP Open Shared Tasks
%D 2019
%8 November
%I Association for Computational Linguistics
%C Hong Kong, China
%F furrer-etal-2019-uzh
%X As our submission to the CRAFT shared task 2019, we present two neural approaches to concept recognition. We propose two different systems for joint named entity recognition (NER) and normalization (NEN), both of which model the task as a sequence labeling problem. Our first system is a BiLSTM network with two separate outputs for NER and NEN trained from scratch, whereas the second system is an instance of BioBERT fine-tuned on the concept-recognition task. We exploit two strategies for extending concept coverage, ontology pretraining and backoff with a dictionary lookup. Our results show that the backoff strategy effectively tackles the problem of unseen concepts, addressing a major limitation of the chosen design. In the cross-system comparison, BioBERT proves to be a strong basis for creating a concept-recognition system, although some entity types are predicted more accurately by the BiLSTM-based system.
%R 10.18653/v1/D19-5726
%U https://aclanthology.org/D19-5726
%U https://doi.org/10.18653/v1/D19-5726
%P 185-195
Markdown (Informal)
[UZH@CRAFT-ST: a Sequence-labeling Approach to Concept Recognition](https://aclanthology.org/D19-5726) (Furrer et al., BioNLP 2019)
ACL