@article{gormley-etal-2015-approximation,
title = "Approximation-Aware Dependency Parsing by Belief Propagation",
author = "Gormley, Matthew R. and
Dredze, Mark and
Eisner, Jason",
editor = "Collins, Michael and
Lee, Lillian",
journal = "Transactions of the Association for Computational Linguistics",
volume = "3",
year = "2015",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://aclanthology.org/Q15-1035",
doi = "10.1162/tacl_a_00153",
pages = "489--501",
abstract = "We show how to train the fast dependency parser of Smith and Eisner (2008) for improved accuracy. This parser can consider higher-order interactions among edges while retaining O(n3) runtime. It outputs the parse with maximum expected recall{---}but for speed, this expectation is taken under a posterior distribution that is constructed only approximately, using loopy belief propagation through structured factors. We show how to adjust the model parameters to compensate for the errors introduced by this approximation, by following the gradient of the actual loss on training data. We find this gradient by back-propagation. That is, we treat the entire parser (approximations and all) as a differentiable circuit, as others have done for loopy CRFs (Domke, 2010; Stoyanov et al., 2011; Domke, 2011; Stoyanov and Eisner, 2012). The resulting parser obtains higher accuracy with fewer iterations of belief propagation than one trained by conditional log-likelihood.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="gormley-etal-2015-approximation">
    <titleInfo>
        <title>Approximation-Aware Dependency Parsing by Belief Propagation</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">Matthew</namePart>
        <namePart type="given">R</namePart>
        <namePart type="family">Gormley</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Mark</namePart>
        <namePart type="family">Dredze</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Jason</namePart>
        <namePart type="family">Eisner</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2015</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <genre authority="bibutilsgt">journal article</genre>
    <relatedItem type="host">
        <titleInfo>
            <title>Transactions of the Association for Computational Linguistics</title>
        </titleInfo>
        <originInfo>
            <issuance>continuing</issuance>
            <publisher>MIT Press</publisher>
            <place>
                <placeTerm type="text">Cambridge, MA</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">periodical</genre>
        <genre authority="bibutilsgt">academic journal</genre>
    </relatedItem>
    <abstract>We show how to train the fast dependency parser of Smith and Eisner (2008) for improved accuracy. This parser can consider higher-order interactions among edges while retaining O(n³) runtime. It outputs the parse with maximum expected recall—but for speed, this expectation is taken under a posterior distribution that is constructed only approximately, using loopy belief propagation through structured factors. We show how to adjust the model parameters to compensate for the errors introduced by this approximation, by following the gradient of the actual loss on training data. We find this gradient by back-propagation. That is, we treat the entire parser (approximations and all) as a differentiable circuit, as others have done for loopy CRFs (Domke, 2010; Stoyanov et al., 2011; Domke, 2011; Stoyanov and Eisner, 2012). The resulting parser obtains higher accuracy with fewer iterations of belief propagation than one trained by conditional log-likelihood.</abstract>
    <identifier type="citekey">gormley-etal-2015-approximation</identifier>
    <identifier type="doi">10.1162/tacl_a_00153</identifier>
    <location>
        <url>https://aclanthology.org/Q15-1035</url>
    </location>
    <part>
        <date>2015</date>
        <detail type="volume"><number>3</number></detail>
        <extent unit="page">
            <start>489</start>
            <end>501</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Journal Article
%T Approximation-Aware Dependency Parsing by Belief Propagation
%A Gormley, Matthew R.
%A Dredze, Mark
%A Eisner, Jason
%J Transactions of the Association for Computational Linguistics
%D 2015
%V 3
%I MIT Press
%C Cambridge, MA
%F gormley-etal-2015-approximation
%X We show how to train the fast dependency parser of Smith and Eisner (2008) for improved accuracy. This parser can consider higher-order interactions among edges while retaining O(n³) runtime. It outputs the parse with maximum expected recall—but for speed, this expectation is taken under a posterior distribution that is constructed only approximately, using loopy belief propagation through structured factors. We show how to adjust the model parameters to compensate for the errors introduced by this approximation, by following the gradient of the actual loss on training data. We find this gradient by back-propagation. That is, we treat the entire parser (approximations and all) as a differentiable circuit, as others have done for loopy CRFs (Domke, 2010; Stoyanov et al., 2011; Domke, 2011; Stoyanov and Eisner, 2012). The resulting parser obtains higher accuracy with fewer iterations of belief propagation than one trained by conditional log-likelihood.
%R 10.1162/tacl_a_00153
%U https://aclanthology.org/Q15-1035
%U https://doi.org/10.1162/tacl_a_00153
%P 489-501
Markdown (Informal)
[Approximation-Aware Dependency Parsing by Belief Propagation](https://aclanthology.org/Q15-1035) (Gormley et al., TACL 2015)
ACL
Matthew R. Gormley, Mark Dredze, and Jason Eisner. 2015. Approximation-Aware Dependency Parsing by Belief Propagation. Transactions of the Association for Computational Linguistics, 3:489–501.
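Code Sketch (Informal)
The abstract's central idea, unrolling approximate inference and back-propagating the training loss through it, can be sketched compactly. The sketch below is illustrative only, not the authors' parser: it assumes JAX, swaps the dependency-parsing factor graph for a toy three-variable cycle, and uses a cross-entropy loss on beliefs as a stand-in for the paper's expected-recall objective; `EDGES`, `N_ITERS`, `incoming`, and `bp_beliefs` are hypothetical names.

```python
# Informal sketch (not the authors' code): run loopy belief propagation (BP)
# for a fixed number of iterations, treat the unrolled procedure as a
# differentiable circuit, and follow the gradient of a loss computed on the
# resulting approximate beliefs. Assumptions: a 3-cycle of binary variables
# stands in for the parsing factor graph, and cross-entropy on beliefs stands
# in for the expected-recall objective.
import jax
import jax.numpy as jnp

EDGES = [(0, 1), (1, 2), (2, 0)]   # a cycle, so BP is genuinely "loopy"
N_ITERS = 5                        # truncated inference, unrolled for backprop

def incoming(msgs, v, exclude=None):
    """Pointwise product of the messages arriving at variable v, optionally
    skipping the edge indexed by `exclude`."""
    prod = jnp.ones(2)
    for e, (i, j) in enumerate(EDGES):
        if e == exclude:
            continue
        if j == v:
            prod = prod * msgs[0, e]   # message i -> j lands on v == j
        elif i == v:
            prod = prod * msgs[1, e]   # message j -> i lands on v == i
    return prod

def bp_beliefs(log_potentials):
    """N_ITERS sweeps of sum-product BP; returns per-variable beliefs.
    log_potentials has shape (num_edges, 2, 2); no unary factors here."""
    psi = jnp.exp(log_potentials)
    # msgs[0, e] is the message i -> j on edge e = (i, j); msgs[1, e] is j -> i.
    msgs = jnp.ones((2, len(EDGES), 2)) / 2.0
    for _ in range(N_ITERS):
        new = []
        for e, (i, j) in enumerate(EDGES):
            m_ij = incoming(msgs, i, exclude=e) @ psi[e]   # sum over x_i
            m_ji = psi[e] @ incoming(msgs, j, exclude=e)   # sum over x_j
            new.append(jnp.stack([m_ij / m_ij.sum(), m_ji / m_ji.sum()]))
        msgs = jnp.stack(new, axis=1)
    b = jnp.stack([incoming(msgs, v) for v in range(3)])
    return b / b.sum(axis=1, keepdims=True)

def loss(log_potentials, gold):
    """Loss on the *approximate* beliefs (a stand-in for expected recall)."""
    b = bp_beliefs(log_potentials)
    return -jnp.sum(jnp.log(b[jnp.arange(3), gold] + 1e-9))

theta = jnp.zeros((len(EDGES), 2, 2))   # edge log-potentials to be learned
gold = jnp.array([0, 1, 0])             # a "gold" assignment for the toy graph
# Back-propagation through every unrolled BP update, approximations and all:
theta = theta - 0.5 * jax.grad(loss)(theta, gold)
```

Because the fixed number of BP sweeps is unrolled, `jax.grad` differentiates through every message update, so the learned potentials compensate for the errors of truncated, loopy inference rather than assuming inference is exact; that is the approximation-aware training the abstract describes.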