@inproceedings{sedova-etal-2021-knodle,
    title = "Knodle: Modular Weakly Supervised Learning with {P}y{T}orch",
    author = "Sedova, Anastasiia  and
      Stephan, Andreas  and
      Speranskaya, Marina  and
      Roth, Benjamin",
    editor = "Rogers, Anna  and
      Calixto, Iacer  and
      Vuli{\'c}, Ivan  and
      Saphra, Naomi  and
      Kassner, Nora  and
      Camburu, Oana-Maria  and
      Bansal, Trapit  and
      Shwartz, Vered",
    booktitle = "Proceedings of the 6th Workshop on Representation Learning for NLP (RepL4NLP-2021)",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.repl4nlp-1.12",
    doi = "10.18653/v1/2021.repl4nlp-1.12",
    pages = "100--111",
    abstract = "Strategies for improving the training and prediction quality of weakly supervised machine learning models vary in how much they are tailored to a specific task or integrated with a specific model architecture. In this work, we introduce Knodle, a software framework that treats weak data annotations, deep learning models, and methods for improving weakly supervised training as separate, modular components. This modularization gives the training process access to fine-grained information such as data set characteristics, matches of heuristic rules, or elements of the deep learning model ultimately used for prediction. Hence, our framework can encompass a wide range of training methods for improving weak supervision, ranging from methods that only look at correlations of rules and output classes (independently of the machine learning model trained with the resulting labels), to those that harness the interplay of neural networks and weakly labeled data. We illustrate the benchmarking potential of the framework with a performance comparison of several reference implementations on a selection of datasets that are already available in Knodle.",
}
Markdown (Informal)
[Knodle: Modular Weakly Supervised Learning with PyTorch](https://aclanthology.org/2021.repl4nlp-1.12) (Sedova et al., RepL4NLP 2021)
ACL
- Anastasiia Sedova, Andreas Stephan, Marina Speranskaya, and Benjamin Roth. 2021. Knodle: Modular Weakly Supervised Learning with PyTorch. In Proceedings of the 6th Workshop on Representation Learning for NLP (RepL4NLP-2021), pages 100–111, Online. Association for Computational Linguistics.
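To give a feel for the modular decomposition the abstract describes, the following minimal PyTorch sketch keeps the weak annotations (a rule-match matrix Z and a rule-to-class mapping T), the aggregation step that turns them into noisy labels, and the downstream model as separate pieces. All names here (Z, T, majority_vote_labels, the toy model) are illustrative assumptions for this sketch, not the actual Knodle API; consult the repository linked from the paper for the real interfaces.

# Hypothetical sketch of the modular idea from the abstract; not the Knodle API.
import torch
import torch.nn as nn

# Weak supervision as plain tensors, kept separate from the model:
# Z marks which labelling rule matched which instance,
# T maps each rule to the class it votes for.
num_samples, num_rules, num_classes, num_features = 8, 4, 2, 16
Z = torch.randint(0, 2, (num_samples, num_rules)).float()   # instances x rules
T = torch.tensor([[1., 0.],                                 # rules x classes
                  [1., 0.],
                  [0., 1.],
                  [0., 1.]])

def majority_vote_labels(Z, T):
    """Aggregate rule matches into noisy class labels by unweighted voting.
    Instances with no matching rule (all-zero votes) fall back to class 0,
    and ties also resolve to the lower class index."""
    votes = Z @ T                                           # instances x classes
    return votes.argmax(dim=1)

X = torch.randn(num_samples, num_features)                  # any featurisation of the inputs
y_weak = majority_vote_labels(Z, T)

# The downstream model is an ordinary PyTorch module; a denoising or
# training-improvement method could inspect or rewrite (Z, T, y_weak)
# at this point without touching the model code.
model = nn.Sequential(nn.Linear(num_features, 32), nn.ReLU(), nn.Linear(32, num_classes))
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
loss_fn = nn.CrossEntropyLoss()

for _ in range(20):                                         # tiny training loop for illustration
    optimizer.zero_grad()
    loss = loss_fn(model(X), y_weak)
    loss.backward()
    optimizer.step()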