@inproceedings{jurgens-2021-learning-word,
title = "Learning about Word Vector Representations and Deep Learning through Implementing Word2vec",
author = "Jurgens, David",
editor = "Jurgens, David and
Kolhatkar, Varada and
Li, Lucy and
Mieskes, Margot and
Pedersen, Ted",
booktitle = "Proceedings of the Fifth Workshop on Teaching NLP",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.teachingnlp-1.19",
doi = "10.18653/v1/2021.teachingnlp-1.19",
pages = "108--111",
abstract = "Word vector representations are an essential part of an NLP curriculum. Here, we describe a homework that has students implement a popular method for learning word vectors, word2vec. Students implement the core parts of the method, including text preprocessing, negative sampling, and gradient descent. Starter code provides guidance and handles basic operations, which allows students to focus on the conceptually challenging aspects. After generating their vectors, students evaluate them using qualitative and quantitative tests.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="jurgens-2021-learning-word">
    <titleInfo>
      <title>Learning about Word Vector Representations and Deep Learning through Implementing Word2vec</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">David</namePart>
      <namePart type="family">Jurgens</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fifth Workshop on Teaching NLP</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">David</namePart>
        <namePart type="family">Jurgens</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Varada</namePart>
        <namePart type="family">Kolhatkar</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Lucy</namePart>
        <namePart type="family">Li</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Margot</namePart>
        <namePart type="family">Mieskes</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Ted</namePart>
        <namePart type="family">Pedersen</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Word vector representations are an essential part of an NLP curriculum. Here, we describe a homework that has students implement a popular method for learning word vectors, word2vec. Students implement the core parts of the method, including text preprocessing, negative sampling, and gradient descent. Starter code provides guidance and handles basic operations, which allows students to focus on the conceptually challenging aspects. After generating their vectors, students evaluate them using qualitative and quantitative tests.</abstract>
    <identifier type="citekey">jurgens-2021-learning-word</identifier>
    <identifier type="doi">10.18653/v1/2021.teachingnlp-1.19</identifier>
    <location>
      <url>https://aclanthology.org/2021.teachingnlp-1.19</url>
    </location>
    <part>
      <date>2021-06</date>
      <extent unit="page">
        <start>108</start>
        <end>111</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Learning about Word Vector Representations and Deep Learning through Implementing Word2vec
%A Jurgens, David
%Y Jurgens, David
%Y Kolhatkar, Varada
%Y Li, Lucy
%Y Mieskes, Margot
%Y Pedersen, Ted
%S Proceedings of the Fifth Workshop on Teaching NLP
%D 2021
%8 June
%I Association for Computational Linguistics
%C Online
%F jurgens-2021-learning-word
%X Word vector representations are an essential part of an NLP curriculum. Here, we describe a homework that has students implement a popular method for learning word vectors, word2vec. Students implement the core parts of the method, including text preprocessing, negative sampling, and gradient descent. Starter code provides guidance and handles basic operations, which allows students to focus on the conceptually challenging aspects. After generating their vectors, students evaluate them using qualitative and quantitative tests.
%R 10.18653/v1/2021.teachingnlp-1.19
%U https://aclanthology.org/2021.teachingnlp-1.19
%U https://doi.org/10.18653/v1/2021.teachingnlp-1.19
%P 108-111
Markdown (Informal)
[Learning about Word Vector Representations and Deep Learning through Implementing Word2vec](https://aclanthology.org/2021.teachingnlp-1.19) (Jurgens, TeachingNLP 2021)

ACL
David Jurgens. 2021. Learning about Word Vector Representations and Deep Learning through Implementing Word2vec. In *Proceedings of the Fifth Workshop on Teaching NLP*, pages 108–111, Online. Association for Computational Linguistics.
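For readers skimming this record: the abstract notes that students implement negative sampling and gradient descent themselves. As a rough illustration only (not the paper's starter code, whose structure this record does not describe), here is a minimal sketch of one skip-gram negative-sampling (SGNS) update in NumPy; all names (`W_in`, `W_out`, `sgns_step`) are hypothetical.

```python
import numpy as np

rng = np.random.default_rng(0)

VOCAB_SIZE, DIM = 10_000, 100
W_in = rng.normal(0, 0.01, (VOCAB_SIZE, DIM))   # center-word ("input") vectors
W_out = rng.normal(0, 0.01, (VOCAB_SIZE, DIM))  # context-word ("output") vectors

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sgns_step(center, context, negatives, lr=0.025):
    """One SGD step for a (center, context) pair with negative samples.

    Follows the gradient of the SGNS objective
        log sigma(u_o . v_c) + sum_k log sigma(-u_k . v_c).
    """
    v = W_in[center].copy()           # read once so all updates use the same v
    grad_v = np.zeros(DIM)
    for word, label in [(context, 1.0)] + [(n, 0.0) for n in negatives]:
        u = W_out[word]
        g = sigmoid(v @ u) - label    # d(loss)/d(score) = sigma(score) - label
        grad_v += g * u
        W_out[word] = u - lr * g * v  # update the context/negative vector
    W_in[center] -= lr * grad_v       # update the center vector

# Toy usage: one update for center word 5, context word 42, five random negatives.
# (A real implementation would draw negatives from a unigram^0.75 distribution
# and resample any that collide with the true context word.)
sgns_step(5, 42, rng.integers(0, VOCAB_SIZE, size=5))
```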