@inproceedings{natarajan-etal-2020-neighbor,
title = "Neighbor Contextual Information Learners for Joint Intent and Slot Prediction",
author = "Natarajan, Bharatram and
Mathur, Gaurav and
Jain, Sameer",
editor = "S, Praveen Kumar G and
Mukherjee, Siddhartha and
Samal, Ranjan",
booktitle = "Proceedings of the Workshop on Joint NLP Modelling for Conversational AI @ ICON 2020",
month = dec,
year = "2020",
address = "Patna, India",
publisher = "NLP Association of India (NLPAI)",
url = "https://aclanthology.org/2020.icon-workshop.1",
pages = "1--9",
abstract = "Intent Identification and Slot Identification aretwo important task for Natural Language Understanding(NLU). Exploration in this areahave gained significance using networks likeRNN, LSTM and GRU. However, modelscontaining the above modules are sequentialin nature, which consumes lot of resourceslike memory to train the model in cloud itself. With the advent of many voice assistantsdelivering offline solutions for manyapplications, there is a need for finding replacementfor such sequential networks. Explorationin self-attention, CNN modules hasgained pace in the recent times. Here we exploreCNN based models like Trellis and modifiedthe architecture to make it bi-directionalwith fusion techniques. In addition, we proposeCNN with Self Attention network calledNeighbor Contextual Information Projector usingMulti Head Attention (NCIPMA) architecture. These architectures beat state of the art inopen source datasets like ATIS, SNIPS.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="natarajan-etal-2020-neighbor">
<titleInfo>
<title>Neighbor Contextual Information Learners for Joint Intent and Slot Prediction</title>
</titleInfo>
<name type="personal">
<namePart type="given">Bharatram</namePart>
<namePart type="family">Natarajan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Gaurav</namePart>
<namePart type="family">Mathur</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sameer</namePart>
<namePart type="family">Jain</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Workshop on Joint NLP Modelling for Conversational AI @ ICON 2020</title>
</titleInfo>
<name type="personal">
<namePart type="given">Praveen</namePart>
<namePart type="given">Kumar</namePart>
<namePart type="given">G</namePart>
<namePart type="family">S</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Siddhartha</namePart>
<namePart type="family">Mukherjee</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ranjan</namePart>
<namePart type="family">Samal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>NLP Association of India (NLPAI)</publisher>
<place>
<placeTerm type="text">Patna, India</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Intent Identification and Slot Identification are two important tasks for Natural Language Understanding (NLU). Exploration in this area has gained significance using networks like RNN, LSTM and GRU. However, models containing the above modules are sequential in nature, which consumes a lot of resources, such as memory, to train the model in the cloud itself. With the advent of many voice assistants delivering offline solutions for many applications, there is a need to find a replacement for such sequential networks. Exploration in self-attention and CNN modules has gained pace in recent times. Here we explore CNN-based models like Trellis and modify the architecture to make it bi-directional with fusion techniques. In addition, we propose a CNN with self-attention network called the Neighbor Contextual Information Projector using Multi Head Attention (NCIPMA) architecture. These architectures beat the state of the art on open-source datasets like ATIS and SNIPS.</abstract>
<identifier type="citekey">natarajan-etal-2020-neighbor</identifier>
<location>
<url>https://aclanthology.org/2020.icon-workshop.1</url>
</location>
<part>
<date>2020-12</date>
<extent unit="page">
<start>1</start>
<end>9</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Neighbor Contextual Information Learners for Joint Intent and Slot Prediction
%A Natarajan, Bharatram
%A Mathur, Gaurav
%A Jain, Sameer
%Y S, Praveen Kumar G.
%Y Mukherjee, Siddhartha
%Y Samal, Ranjan
%S Proceedings of the Workshop on Joint NLP Modelling for Conversational AI @ ICON 2020
%D 2020
%8 December
%I NLP Association of India (NLPAI)
%C Patna, India
%F natarajan-etal-2020-neighbor
%X Intent Identification and Slot Identification are two important tasks for Natural Language Understanding (NLU). Exploration in this area has gained significance using networks like RNN, LSTM and GRU. However, models containing the above modules are sequential in nature, which consumes a lot of resources, such as memory, to train the model in the cloud itself. With the advent of many voice assistants delivering offline solutions for many applications, there is a need to find a replacement for such sequential networks. Exploration in self-attention and CNN modules has gained pace in recent times. Here we explore CNN-based models like Trellis and modify the architecture to make it bi-directional with fusion techniques. In addition, we propose a CNN with self-attention network called the Neighbor Contextual Information Projector using Multi Head Attention (NCIPMA) architecture. These architectures beat the state of the art on open-source datasets like ATIS and SNIPS.
%U https://aclanthology.org/2020.icon-workshop.1
%P 1-9
Markdown (Informal)
[Neighbor Contextual Information Learners for Joint Intent and Slot Prediction](https://aclanthology.org/2020.icon-workshop.1) (Natarajan et al., ICON 2020)
ACL
Bharatram Natarajan, Gaurav Mathur, and Sameer Jain. 2020. Neighbor Contextual Information Learners for Joint Intent and Slot Prediction. In Proceedings of the Workshop on Joint NLP Modelling for Conversational AI @ ICON 2020, pages 1–9, Patna, India. NLP Association of India (NLPAI).
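
The abstract above describes the NCIPMA idea only at a high level: a CNN that captures neighbor context, combined with multi-head self-attention, producing sentence-level intent and token-level slot predictions jointly. The following is a minimal numpy sketch of that general pattern, assuming arbitrary layer sizes, pooling choices, and names such as `ncipma_forward`; it illustrates the idea rather than reproducing the authors' implementation.

```python
# Minimal, illustrative sketch (NOT the authors' implementation) of a CNN +
# multi-head self-attention model for joint intent and slot prediction, as
# described at a high level in the abstract. All dimensions, layer choices,
# and function names here are assumptions made for illustration only.
import numpy as np

rng = np.random.default_rng(0)

def conv1d_same(x, W, b):
    """1D convolution over the token axis with 'same' padding.
    x: (T, d_in), W: (k, d_in, d_out), b: (d_out,)"""
    k, d_in, d_out = W.shape
    pad = k // 2
    xp = np.pad(x, ((pad, pad), (0, 0)))
    out = np.empty((x.shape[0], d_out))
    for t in range(x.shape[0]):
        window = xp[t:t + k]                         # (k, d_in) neighbor context
        out[t] = np.einsum("ki,kio->o", window, W) + b
    return out

def multi_head_self_attention(x, n_heads):
    """Scaled dot-product self-attention per head (identity projections, sketch).
    x: (T, d) with d divisible by n_heads."""
    T, d = x.shape
    dh = d // n_heads
    heads = []
    for h in range(n_heads):
        q = k = v = x[:, h * dh:(h + 1) * dh]        # (T, dh)
        scores = q @ k.T / np.sqrt(dh)               # (T, T) token-token scores
        attn = np.exp(scores - scores.max(axis=-1, keepdims=True))
        attn /= attn.sum(axis=-1, keepdims=True)
        heads.append(attn @ v)
    return np.concatenate(heads, axis=-1)            # (T, d)

def ncipma_forward(embeddings, n_intents, n_slot_labels, k=3, n_heads=4):
    """Joint intent + slot logits from token embeddings of shape (T, d)."""
    T, d = embeddings.shape
    W_conv = rng.normal(0, 0.02, (k, d, d))
    local = conv1d_same(embeddings, W_conv, np.zeros(d))   # local neighbor features
    ctx = multi_head_self_attention(local, n_heads)        # global contextualisation
    W_slot = rng.normal(0, 0.02, (d, n_slot_labels))
    W_intent = rng.normal(0, 0.02, (d, n_intents))
    slot_logits = ctx @ W_slot                       # per-token slot logits (T, n_slot_labels)
    intent_logits = ctx.mean(axis=0) @ W_intent      # mean-pooled sentence-level intent logits
    return intent_logits, slot_logits

# Toy usage: 6 tokens, 32-dim embeddings; label counts chosen arbitrarily.
emb = rng.normal(size=(6, 32))
intent_logits, slot_logits = ncipma_forward(emb, n_intents=21, n_slot_labels=120)
print(intent_logits.shape, slot_logits.shape)        # (21,) (6, 120)
```

Unlike the recurrent (RNN/LSTM/GRU) baselines the abstract contrasts against, every step here is a fixed-depth matrix operation over all tokens at once, which is the property that makes such CNN/self-attention models attractive for resource-constrained, offline deployment.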