@inproceedings{feng-etal-2021-cryptogru,
title = "{CRYPTOGRU}: Low Latency Privacy-Preserving Text Analysis With {GRU}",
author = "Feng, Bo and
Lou, Qian and
Jiang, Lei and
Fox, Geoffrey",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.emnlp-main.156",
doi = "10.18653/v1/2021.emnlp-main.156",
pages = "2052--2057",
    abstract = "Homomorphic encryption (HE) and garbled circuit (GC) provide the protection for users{'} privacy. However, simply mixing the HE and GC in RNN models suffers from long inference latency due to slow activation functions. In this paper, we present a novel hybrid structure of HE and GC gated recurrent unit (GRU) network, CryptoGRU, for low-latency secure inferences. CryptoGRU replaces computationally expensive GC-based $tanh$ with fast GC-based $ReLU$, and then quantizes $sigmoid$ and $ReLU$ to smaller bit-length to accelerate activations in a GRU. We evaluate CryptoGRU with multiple GRU models trained on 4 public datasets. Experimental results show CryptoGRU achieves top-notch accuracy and improves the secure inference latency by up to $138\times$ over one of the state-of-the-art secure networks on the Penn Treebank dataset.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="feng-etal-2021-cryptogru">
<titleInfo>
<title>CRYPTOGRU: Low Latency Privacy-Preserving Text Analysis With GRU</title>
</titleInfo>
<name type="personal">
<namePart type="given">Bo</namePart>
<namePart type="family">Feng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Qian</namePart>
<namePart type="family">Lou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lei</namePart>
<namePart type="family">Jiang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Geoffrey</namePart>
<namePart type="family">Fox</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online and Punta Cana, Dominican Republic</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
    <abstract>Homomorphic encryption (HE) and garbled circuit (GC) provide the protection for users’ privacy. However, simply mixing the HE and GC in RNN models suffers from long inference latency due to slow activation functions. In this paper, we present a novel hybrid structure of HE and GC gated recurrent unit (GRU) network, CryptoGRU, for low-latency secure inferences. CryptoGRU replaces computationally expensive GC-based tanh with fast GC-based ReLU, and then quantizes sigmoid and ReLU to smaller bit-length to accelerate activations in a GRU. We evaluate CryptoGRU with multiple GRU models trained on 4 public datasets. Experimental results show CryptoGRU achieves top-notch accuracy and improves the secure inference latency by up to 138× over one of the state-of-the-art secure networks on the Penn Treebank dataset.</abstract>
<identifier type="citekey">feng-etal-2021-cryptogru</identifier>
<identifier type="doi">10.18653/v1/2021.emnlp-main.156</identifier>
<location>
<url>https://aclanthology.org/2021.emnlp-main.156</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>2052</start>
<end>2057</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T CRYPTOGRU: Low Latency Privacy-Preserving Text Analysis With GRU
%A Feng, Bo
%A Lou, Qian
%A Jiang, Lei
%A Fox, Geoffrey
%S Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing
%D 2021
%8 November
%I Association for Computational Linguistics
%C Online and Punta Cana, Dominican Republic
%F feng-etal-2021-cryptogru
%X Homomorphic encryption (HE) and garbled circuit (GC) provide the protection for users’ privacy. However, simply mixing the HE and GC in RNN models suffers from long inference latency due to slow activation functions. In this paper, we present a novel hybrid structure of HE and GC gated recurrent unit (GRU) network, CryptoGRU, for low-latency secure inferences. CryptoGRU replaces computationally expensive GC-based tanh with fast GC-based ReLU, and then quantizes sigmoid and ReLU to smaller bit-length to accelerate activations in a GRU. We evaluate CryptoGRU with multiple GRU models trained on 4 public datasets. Experimental results show CryptoGRU achieves top-notch accuracy and improves the secure inference latency by up to 138× over one of the state-of-the-art secure networks on the Penn Treebank dataset.
%R 10.18653/v1/2021.emnlp-main.156
%U https://aclanthology.org/2021.emnlp-main.156
%U https://doi.org/10.18653/v1/2021.emnlp-main.156
%P 2052-2057
Markdown (Informal)
[CRYPTOGRU: Low Latency Privacy-Preserving Text Analysis With GRU](https://aclanthology.org/2021.emnlp-main.156) (Feng et al., EMNLP 2021)
ACL