@inproceedings{perkins-etal-2017-learning,
title = "Learning an Input Filter for Argument Structure Acquisition",
author = "Perkins, Laurel and
Feldman, Naomi and
Lidz, Jeffrey",
editor = "Gibson, Ted and
Linzen, Tal and
Sayeed, Asad and
van Schijndel, Martin and
Schuler, William",
booktitle = "Proceedings of the 7th Workshop on Cognitive Modeling and Computational Linguistics ({CMCL} 2017)",
month = apr,
year = "2017",
address = "Valencia, Spain",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W17-0702",
doi = "10.18653/v1/W17-0702",
pages = "11--19",
abstract = "How do children learn a verb{'}s argument structure when their input contains nonbasic clauses that obscure verb transitivity? Here we present a new model that infers verb transitivity by learning to filter out non-basic clauses that were likely parsed in error. In simulations with child-directed speech, we show that this model accurately categorizes the majority of 50 frequent transitive, intransitive and alternating verbs, and jointly learns appropriate parameters for filtering parsing errors. Our model is thus able to filter out problematic data for verb learning without knowing in advance which data need to be filtered.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="perkins-etal-2017-learning">
<titleInfo>
<title>Learning an Input Filter for Argument Structure Acquisition</title>
</titleInfo>
<name type="personal">
<namePart type="given">Laurel</namePart>
<namePart type="family">Perkins</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Naomi</namePart>
<namePart type="family">Feldman</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jeffrey</namePart>
<namePart type="family">Lidz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2017-04</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 7th Workshop on Cognitive Modeling and Computational Linguistics (CMCL 2017)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ted</namePart>
<namePart type="family">Gibson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tal</namePart>
<namePart type="family">Linzen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Asad</namePart>
<namePart type="family">Sayeed</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Martin</namePart>
<namePart type="family">van Schijndel</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">William</namePart>
<namePart type="family">Schuler</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Valencia, Spain</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
    <abstract>How do children learn a verb’s argument structure when their input contains non-basic clauses that obscure verb transitivity? Here we present a new model that infers verb transitivity by learning to filter out non-basic clauses that were likely parsed in error. In simulations with child-directed speech, we show that this model accurately categorizes the majority of 50 frequent transitive, intransitive and alternating verbs, and jointly learns appropriate parameters for filtering parsing errors. Our model is thus able to filter out problematic data for verb learning without knowing in advance which data need to be filtered.</abstract>
<identifier type="citekey">perkins-etal-2017-learning</identifier>
<identifier type="doi">10.18653/v1/W17-0702</identifier>
<location>
<url>https://aclanthology.org/W17-0702</url>
</location>
<part>
<date>2017-04</date>
<extent unit="page">
<start>11</start>
<end>19</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Learning an Input Filter for Argument Structure Acquisition
%A Perkins, Laurel
%A Feldman, Naomi
%A Lidz, Jeffrey
%Y Gibson, Ted
%Y Linzen, Tal
%Y Sayeed, Asad
%Y van Schijndel, Martin
%Y Schuler, William
%S Proceedings of the 7th Workshop on Cognitive Modeling and Computational Linguistics (CMCL 2017)
%D 2017
%8 April
%I Association for Computational Linguistics
%C Valencia, Spain
%F perkins-etal-2017-learning
%X How do children learn a verb’s argument structure when their input contains non-basic clauses that obscure verb transitivity? Here we present a new model that infers verb transitivity by learning to filter out non-basic clauses that were likely parsed in error. In simulations with child-directed speech, we show that this model accurately categorizes the majority of 50 frequent transitive, intransitive and alternating verbs, and jointly learns appropriate parameters for filtering parsing errors. Our model is thus able to filter out problematic data for verb learning without knowing in advance which data need to be filtered.
%R 10.18653/v1/W17-0702
%U https://aclanthology.org/W17-0702
%U https://doi.org/10.18653/v1/W17-0702
%P 11-19
Markdown (Informal)
[Learning an Input Filter for Argument Structure Acquisition](https://aclanthology.org/W17-0702) (Perkins et al., CMCL 2017)
ACL
Laurel Perkins, Naomi Feldman, and Jeffrey Lidz. 2017. Learning an Input Filter for Argument Structure Acquisition. In Proceedings of the 7th Workshop on Cognitive Modeling and Computational Linguistics (CMCL 2017), pages 11–19, Valencia, Spain. Association for Computational Linguistics.