@inproceedings{micluta-campeanu-2026-novel-drivel,
title = "Novel or Drivel? Variants of Invariants for Teaching {NLP} in the {LLM} Era",
author = "Micluța-C{\^a}mpeanu, Marius",
editor = {A{\ss}enmacher, Matthias and
Biester, Laura and
Borg, Claudia and
Kov{\'a}cs, Gy{\"o}rgy and
Mieskes, Margot and
Serrano, Sofia},
booktitle = "Proceedings of the Seventh Workshop on Teaching Natural Language Processing ({T}each{NLP} 2026)",
month = mar,
year = "2026",
address = "Rabat, Morocco",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2026.teachingnlp-1.17/",
pages = "129--133",
ISBN = "979-8-89176-375-3",
abstract = "The ubiquitous adoption of large language models by students prompts teachers to redesign courses and evaluation methods, especially in computer science and natural language processing (NLP) where the impact is more tangible. Our contribution is two-fold. First, we attempt to define invariants for the role of education itself given the over-abundance of information that appears to be more accessible than ever before. Then, we present our approach and materials used for an introductory course in NLP for undergraduate students, drawing inspiration from software engineering best practices. Our vision regarding large language models is to rely on local models to cultivate a sense of ownership and sovereignty in an age where every bit of independence and privacy get eroded."
}<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="micluta-campeanu-2026-novel-drivel">
<titleInfo>
<title>Novel or Drivel? Variants of Invariants for Teaching NLP in the LLM Era</title>
</titleInfo>
<name type="personal">
<namePart type="given">Marius</namePart>
<namePart type="family">Micluța-Câmpeanu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2026-03</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Seventh Workshop on Teaching Natural Language Processing (TeachNLP 2026)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Matthias</namePart>
<namePart type="family">Aßenmacher</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Laura</namePart>
<namePart type="family">Biester</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Claudia</namePart>
<namePart type="family">Borg</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">György</namePart>
<namePart type="family">Kovács</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Margot</namePart>
<namePart type="family">Mieskes</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sofia</namePart>
<namePart type="family">Serrano</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Rabat, Morocco</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-375-3</identifier>
</relatedItem>
<abstract>The ubiquitous adoption of large language models by students prompts teachers to redesign courses and evaluation methods, especially in computer science and natural language processing (NLP) where the impact is more tangible. Our contribution is two-fold. First, we attempt to define invariants for the role of education itself given the over-abundance of information that appears to be more accessible than ever before. Then, we present our approach and materials used for an introductory course in NLP for undergraduate students, drawing inspiration from software engineering best practices. Our vision regarding large language models is to rely on local models to cultivate a sense of ownership and sovereignty in an age where every bit of independence and privacy get eroded.</abstract>
<identifier type="citekey">micluta-campeanu-2026-novel-drivel</identifier>
<location>
<url>https://aclanthology.org/2026.teachingnlp-1.17/</url>
</location>
<part>
<date>2026-03</date>
<extent unit="page">
<start>129</start>
<end>133</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Novel or Drivel? Variants of Invariants for Teaching NLP in the LLM Era
%A Micluța-Câmpeanu, Marius
%Y Aßenmacher, Matthias
%Y Biester, Laura
%Y Borg, Claudia
%Y Kovács, György
%Y Mieskes, Margot
%Y Serrano, Sofia
%S Proceedings of the Seventh Workshop on Teaching Natural Language Processing (TeachNLP 2026)
%D 2026
%8 March
%I Association for Computational Linguistics
%C Rabat, Morocco
%@ 979-8-89176-375-3
%F micluta-campeanu-2026-novel-drivel
%X The ubiquitous adoption of large language models by students prompts teachers to redesign courses and evaluation methods, especially in computer science and natural language processing (NLP) where the impact is more tangible. Our contribution is two-fold. First, we attempt to define invariants for the role of education itself given the over-abundance of information that appears to be more accessible than ever before. Then, we present our approach and materials used for an introductory course in NLP for undergraduate students, drawing inspiration from software engineering best practices. Our vision regarding large language models is to rely on local models to cultivate a sense of ownership and sovereignty in an age where every bit of independence and privacy get eroded.
%U https://aclanthology.org/2026.teachingnlp-1.17/
%P 129-133
Markdown (Informal)
[Novel or Drivel? Variants of Invariants for Teaching NLP in the LLM Era](https://aclanthology.org/2026.teachingnlp-1.17/) (Micluța-Câmpeanu, TeachingNLP 2026)
ACL