@inproceedings{gu-etal-2023-co,
title = "Co-evolving data-driven and {NLU}-driven Synthesizers for Generating Code in Domain Growth and Data Scarcity",
author = "Gu, Jiasheng and
Nan, Zifan and
Peng, Zhiyuan and
Shen, Xipeng and
Xu, Dongkuan",
editor = "Surdeanu, Mihai and
Riloff, Ellen and
Chiticariu, Laura and
Freitag, Dayne and
Hahn-Powell, Gus and
Morrison, Clayton T. and
Noriega-Atala, Enrique and
Sharp, Rebecca and
Valenzuela-Escarcega, Marco",
booktitle = "Proceedings of the 2nd Workshop on Pattern-based Approaches to NLP in the Age of Deep Learning",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.pandl-1.7",
doi = "10.18653/v1/2023.pandl-1.7",
pages = "64--74",
abstract = "Natural language programming automatically generates code based on a user{'}s text query. Recent solutions are either data-driven or natural language understanding (NLU)-driven. However, the data-driven synthesizer requires a large number of query-code pairs for training, which hinders its application to low-resource programming languages with growing domains whose functionality and grammar can be actively updated. NLU-driven synthesizers solve this problem, but their code generation is slow and their performance rapidly saturates in the presence of ever-increasing data. In this paper, we propose a circular training framework, Colead, which co-evolves both the data-driven synthesizer and the NLU-driven synthesizer to achieve high-quality code generation in the presence of data scarcity and domain growth. The NLU-driven synthesizer generates query-code pairs to update the data-driven synthesizer, which shares a part of its updated model to improve the NLU-driven synthesizers, enabling the co-evolution of both. Experiments show that Colead gives better results than the baselines in the presence of domain growth and data scarcity, and Colead consistently improves the performance of both data-driven and NLU-driven synthesizers over the co-evolvement.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="gu-etal-2023-co">
<titleInfo>
<title>Co-evolving data-driven and NLU-driven Synthesizers for Generating Code in Domain Growth and Data Scarcity</title>
</titleInfo>
<name type="personal">
<namePart type="given">Jiasheng</namePart>
<namePart type="family">Gu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zifan</namePart>
<namePart type="family">Nan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zhiyuan</namePart>
<namePart type="family">Peng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xipeng</namePart>
<namePart type="family">Shen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dongkuan</namePart>
<namePart type="family">Xu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2nd Workshop on Pattern-based Approaches to NLP in the Age of Deep Learning</title>
</titleInfo>
<name type="personal">
<namePart type="given">Mihai</namePart>
<namePart type="family">Surdeanu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ellen</namePart>
<namePart type="family">Riloff</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Laura</namePart>
<namePart type="family">Chiticariu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dayne</namePart>
<namePart type="family">Frietag</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Gus</namePart>
<namePart type="family">Hahn-Powell</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Clayton</namePart>
<namePart type="given">T</namePart>
<namePart type="family">Morrison</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Enrique</namePart>
<namePart type="family">Noriega-Atala</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Rebecca</namePart>
<namePart type="family">Sharp</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marco</namePart>
<namePart type="family">Valenzuela-Escarcega</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Singapore</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Natural language programming automatically generates code based on a user’s text query. Recent solutions are either data-driven or natural language understanding (NLU)-driven. However, the data-driven synthesizer requires a large number of query-code pairs for training, which hinders its application to low-resource programming languages with growing domains whose functionality and grammar can be actively updated. NLU-driven synthesizers solve this problem, but their code generation is slow and their performance rapidly saturates in the presence of ever-increasing data. In this paper, we propose a circular training framework, Colead, which co-evolves both the data-driven synthesizer and the NLU-driven synthesizer to achieve high-quality code generation in the presence of data scarcity and domain growth. The NLU-driven synthesizer generates query-code pairs to update the data-driven synthesizer, which shares a part of its updated model to improve the NLU-driven synthesizers, enabling the co-evolution of both. Experiments show that Colead gives better results than the baselines in the presence of domain growth and data scarcity, and Colead consistently improves the performance of both data-driven and NLU-driven synthesizers over the co-evolvement.</abstract>
<identifier type="citekey">gu-etal-2023-co</identifier>
<identifier type="doi">10.18653/v1/2023.pandl-1.7</identifier>
<location>
<url>https://aclanthology.org/2023.pandl-1.7</url>
</location>
<part>
<date>2023-12</date>
<extent unit="page">
<start>64</start>
<end>74</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Co-evolving data-driven and NLU-driven Synthesizers for Generating Code in Domain Growth and Data Scarcity
%A Gu, Jiasheng
%A Nan, Zifan
%A Peng, Zhiyuan
%A Shen, Xipeng
%A Xu, Dongkuan
%Y Surdeanu, Mihai
%Y Riloff, Ellen
%Y Chiticariu, Laura
%Y Freitag, Dayne
%Y Hahn-Powell, Gus
%Y Morrison, Clayton T.
%Y Noriega-Atala, Enrique
%Y Sharp, Rebecca
%Y Valenzuela-Escarcega, Marco
%S Proceedings of the 2nd Workshop on Pattern-based Approaches to NLP in the Age of Deep Learning
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F gu-etal-2023-co
%X Natural language programming automatically generates code from a user’s text query. Recent solutions are either data-driven or natural language understanding (NLU)-driven. However, data-driven synthesizers require a large number of query-code pairs for training, which hinders their application to low-resource programming languages with growing domains whose functionality and grammar are actively updated. NLU-driven synthesizers avoid this problem, but their code generation is slow and their performance saturates quickly as data grows. In this paper, we propose Colead, a circular training framework that co-evolves the data-driven and NLU-driven synthesizers to achieve high-quality code generation under data scarcity and domain growth. The NLU-driven synthesizer generates query-code pairs to update the data-driven synthesizer, which in turn shares part of its updated model to improve the NLU-driven synthesizer, enabling the co-evolution of both. Experiments show that Colead outperforms the baselines under domain growth and data scarcity and consistently improves both synthesizers throughout their co-evolution.
%R 10.18653/v1/2023.pandl-1.7
%U https://aclanthology.org/2023.pandl-1.7
%U https://doi.org/10.18653/v1/2023.pandl-1.7
%P 64-74
Markdown (Informal)
[Co-evolving data-driven and NLU-driven Synthesizers for Generating Code in Domain Growth and Data Scarcity](https://aclanthology.org/2023.pandl-1.7) (Gu et al., PANDL-WS 2023)
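For readers skimming this record, a minimal sketch of the circular training loop described in the abstract, written in Python. It assumes only what the abstract states; the class names, the `absorb`/`shared_component` interfaces, and the placeholder training logic are hypothetical illustrations, not the paper's actual implementation.

```python
# Minimal sketch of the circular (co-evolving) training loop described in the
# Colead abstract. All names and interfaces here are illustrative assumptions.

class NLUDrivenSynthesizer:
    """Stand-in for an NLU-driven synthesizer that can label raw queries."""
    def generate(self, query: str) -> str:
        return f"CODE_FOR({query})"          # placeholder code generation

    def absorb(self, shared_component) -> None:
        self.shared = shared_component       # reuse part of the neural model


class DataDrivenSynthesizer:
    """Stand-in for a data-driven (neural) synthesizer trained on query-code pairs."""
    def __init__(self):
        self.pairs = []

    def train(self, pairs) -> None:
        self.pairs.extend(pairs)             # placeholder for fine-tuning

    def shared_component(self):
        # e.g. an encoder or learned representations shared back to the NLU side
        return {"num_pairs_seen": len(self.pairs)}


def co_evolve(nlu, neural, queries, rounds=3):
    """Alternate the two update steps sketched in the abstract."""
    for _ in range(rounds):
        # 1) The NLU-driven synthesizer produces query-code pairs from raw queries.
        pairs = [(q, nlu.generate(q)) for q in queries]
        # 2) Those pairs update the data-driven synthesizer.
        neural.train(pairs)
        # 3) Part of the updated data-driven model flows back to the NLU-driven side.
        nlu.absorb(neural.shared_component())
    return nlu, neural


if __name__ == "__main__":
    co_evolve(NLUDrivenSynthesizer(), DataDrivenSynthesizer(),
              ["sort a list", "read a csv file"])
```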