@inproceedings{guo-etal-2021-improving-numerical,
title = "Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text",
author = "Guo, Xiao-Yu and
Li, Yuan-Fang and
Haffari, Gholamreza",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.findings-emnlp.231",
doi = "10.18653/v1/2021.findings-emnlp.231",
pages = "2713--2718",
abstract = "Numerical reasoning skills are essential for complex question answering (CQA) over text. It requires opertaions including counting, comparison, addition and subtraction. A successful approach to CQA on text, Neural Module Networks (NMNs), follows the programmer-interpreter paradigm and leverages specialised modules to perform compositional reasoning. However, the NMNs framework does not consider the relationship between numbers and entities in both questions and paragraphs. We propose effective techniques to improve NMNs{'} numerical reasoning capabilities by making the interpreter question-aware and capturing the relationship between entities and numbers. On the same subset of the DROP dataset for CQA on text, experimental results show that our additions outperform the original NMNs by 3.0 points for the overall F1 score.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="guo-etal-2021-improving-numerical">
    <titleInfo>
      <title>Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Xiao-Yu</namePart>
      <namePart type="family">Guo</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yuan-Fang</namePart>
      <namePart type="family">Li</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Gholamreza</namePart>
      <namePart type="family">Haffari</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Findings of the Association for Computational Linguistics: EMNLP 2021</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Punta Cana, Dominican Republic</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Numerical reasoning skills are essential for complex question answering (CQA) over text. These skills require operations including counting, comparison, addition and subtraction. A successful approach to CQA on text, Neural Module Networks (NMNs), follows the programmer-interpreter paradigm and leverages specialised modules to perform compositional reasoning. However, the NMNs framework does not consider the relationship between numbers and entities in both questions and paragraphs. We propose effective techniques to improve NMNs’ numerical reasoning capabilities by making the interpreter question-aware and capturing the relationship between entities and numbers. On the same subset of the DROP dataset for CQA on text, experimental results show that our additions outperform the original NMNs by 3.0 points for the overall F1 score.</abstract>
<identifier type="citekey">guo-etal-2021-improving-numerical</identifier>
<identifier type="doi">10.18653/v1/2021.findings-emnlp.231</identifier>
<location>
<url>https://aclanthology.org/2021.findings-emnlp.231</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>2713</start>
<end>2718</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text
%A Guo, Xiao-Yu
%A Li, Yuan-Fang
%A Haffari, Gholamreza
%S Findings of the Association for Computational Linguistics: EMNLP 2021
%D 2021
%8 November
%I Association for Computational Linguistics
%C Punta Cana, Dominican Republic
%F guo-etal-2021-improving-numerical
%X Numerical reasoning skills are essential for complex question answering (CQA) over text. These skills require operations including counting, comparison, addition and subtraction. A successful approach to CQA on text, Neural Module Networks (NMNs), follows the programmer-interpreter paradigm and leverages specialised modules to perform compositional reasoning. However, the NMNs framework does not consider the relationship between numbers and entities in both questions and paragraphs. We propose effective techniques to improve NMNs’ numerical reasoning capabilities by making the interpreter question-aware and capturing the relationship between entities and numbers. On the same subset of the DROP dataset for CQA on text, experimental results show that our additions outperform the original NMNs by 3.0 points for the overall F1 score.
%R 10.18653/v1/2021.findings-emnlp.231
%U https://aclanthology.org/2021.findings-emnlp.231
%U https://doi.org/10.18653/v1/2021.findings-emnlp.231
%P 2713-2718
Markdown (Informal)
[Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text](https://aclanthology.org/2021.findings-emnlp.231) (Guo et al., Findings 2021)
ACL
Xiao-Yu Guo, Yuan-Fang Li, and Gholamreza Haffari. 2021. Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text. In Findings of the Association for Computational Linguistics: EMNLP 2021, pages 2713–2718, Punta Cana, Dominican Republic. Association for Computational Linguistics.