@inproceedings{tikhonova-etal-2026-standard-transformers,
title = "From Standard Transformers to {M}odern {LLM}s: Bringing Dialogue Models, {RAG}, and Agents to the Classroom",
author = "Tikhonova, Maria and
Chekalina, Viktoriia A. and
Chervyakov, Artem and
Zaytsev, Alexey and
Panchenko, Alexander",
editor = {A{\ss}enmacher, Matthias and
Biester, Laura and
Borg, Claudia and
Kov{\'a}cs, Gy{\"o}rgy and
Mieskes, Margot and
Serrano, Sofia},
booktitle = "Proceedings of the Seventh Workshop on Teaching Natural Language Processing ({T}each{NLP} 2026)",
month = mar,
year = "2026",
address = "Rabat, Morocco",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2026.teachingnlp-1.8/",
pages = "41--44",
ISBN = "979-8-89176-375-3",
abstract = "Modern LLM education is increasingly centered on system building: grounding generation with retrieval, enabling tool use, and deploying models under latency and cost constraints. We present an updated release of our open course on Transformer-based LLMs and multimodal models (Nikishina et al., 2024). The update introduces topics which became important since the first edition, namely a session on Retrieval Augmented Generation (RAG), a hands-on session on tool-using agents, an API-based track for applied work with LLMs, and practical local inference with vLLM. We also add a dedicated session on multimodal dialog models with a focus on dialog grounding. We enriched the course with a discussion on long-context transformers, focusing on KV-cache efficiency along with the related models and benchmarks. All materials are released online."
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="tikhonova-etal-2026-standard-transformers">
<titleInfo>
<title>From Standard Transformers to Modern LLMs: Bringing Dialogue Models, RAG, and Agents to the Classroom</title>
</titleInfo>
<name type="personal">
<namePart type="given">Maria</namePart>
<namePart type="family">Tikhonova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Viktoriia</namePart>
<namePart type="given">A</namePart>
<namePart type="family">Chekalina</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Artem</namePart>
<namePart type="family">Chervyakov</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexey</namePart>
<namePart type="family">Zaytsev</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="family">Panchenko</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2026-03</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Seventh Workshop on Teaching Natural Language Processing (TeachNLP 2026)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Matthias</namePart>
<namePart type="family">Aßenmacher</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Laura</namePart>
<namePart type="family">Biester</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Claudia</namePart>
<namePart type="family">Borg</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">György</namePart>
<namePart type="family">Kovács</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Margot</namePart>
<namePart type="family">Mieskes</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Sofia</namePart>
<namePart type="family">Serrano</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Rabat, Morocco</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
<identifier type="isbn">979-8-89176-375-3</identifier>
</relatedItem>
<abstract>Modern LLM education is increasingly centered on system building: grounding generation with retrieval, enabling tool use, and deploying models under latency and cost constraints. We present an updated release of our open course on Transformer-based LLMs and multimodal models (Nikishina et al., 2024). The update introduces topics which became important since the first edition, namely a session on Retrieval Augmented Generation (RAG), a hands-on session on tool-using agents, an API-based track for applied work with LLMs, and practical local inference with vLLM. We also add a dedicated session on multimodal dialog models with a focus on dialog grounding. We enriched the course with a discussion on long-context transformers, focusing on KV-cache efficiency along with the related models and benchmarks. All materials are released online.</abstract>
<identifier type="citekey">tikhonova-etal-2026-standard-transformers</identifier>
<location>
<url>https://aclanthology.org/2026.teachingnlp-1.8/</url>
</location>
<part>
<date>2026-03</date>
<extent unit="page">
<start>41</start>
<end>44</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T From Standard Transformers to Modern LLMs: Bringing Dialogue Models, RAG, and Agents to the Classroom
%A Tikhonova, Maria
%A Chekalina, Viktoriia A.
%A Chervyakov, Artem
%A Zaytsev, Alexey
%A Panchenko, Alexander
%Y Aßenmacher, Matthias
%Y Biester, Laura
%Y Borg, Claudia
%Y Kovács, György
%Y Mieskes, Margot
%Y Serrano, Sofia
%S Proceedings of the Seventh Workshop on Teaching Natural Language Processing (TeachNLP 2026)
%D 2026
%8 March
%I Association for Computational Linguistics
%C Rabat, Morocco
%@ 979-8-89176-375-3
%F tikhonova-etal-2026-standard-transformers
%X Modern LLM education is increasingly centered on system building: grounding generation with retrieval, enabling tool use, and deploying models under latency and cost constraints. We present an updated release of our open course on Transformer-based LLMs and multimodal models (Nikishina et al., 2024). The update introduces topics which became important since the first edition, namely a session on Retrieval Augmented Generation (RAG), a hands-on session on tool-using agents, an API-based track for applied work with LLMs, and practical local inference with vLLM. We also add a dedicated session on multimodal dialog models with a focus on dialog grounding. We enriched the course with a discussion on long-context transformers, focusing on KV-cache efficiency along with the related models and benchmarks. All materials are released online.
%U https://aclanthology.org/2026.teachingnlp-1.8/
%P 41-44
Markdown (Informal)
[From Standard Transformers to Modern LLMs: Bringing Dialogue Models, RAG, and Agents to the Classroom](https://aclanthology.org/2026.teachingnlp-1.8/) (Tikhonova et al., TeachingNLP 2026)
ACL