@inproceedings{wakabayashi-2019-silent,
    title     = {Silent {HMM}s: Generalized Representation of Hidden Semi-{M}arkov Models and Hierarchical {HMM}s},
    author    = {Wakabayashi, Kei},
    editor    = {Vogler, Heiko and
                 Maletti, Andreas},
    booktitle = {Proceedings of the 14th International Conference on Finite-State Methods and Natural Language Processing},
    month     = sep,
    year      = {2019},
    address   = {Dresden, Germany},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/W19-3113},
    doi       = {10.18653/v1/W19-3113},
    pages     = {98--107},
    abstract  = {Modeling sequence data using probabilistic finite state machines (PFSMs) is a technique that analyzes the underlying dynamics in sequences of symbols. Hidden semi-Markov models (HSMMs) and hierarchical hidden Markov models (HHMMs) are PFSMs that have been successfully applied to a wide variety of applications by extending HMMs to make the extracted patterns easier to interpret. However, these models are independently developed with their own training algorithm, so that we cannot combine multiple kinds of structures to build a PFSM for a specific application. In this paper, we prove that silent hidden Markov models (silent HMMs) are flexible models that have more expressive power than HSMMs and HHMMs. Silent HMMs are HMMs that contain silent states, which do not emit any observations. We show that we can obtain silent HMM equivalent to given HSMMs and HHMMs. We believe that these results form a firm foundation to use silent HMMs as a unified representation for PFSM modeling.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="wakabayashi-2019-silent">
<titleInfo>
<title>Silent HMMs: Generalized Representation of Hidden Semi-Markov Models and Hierarchical HMMs</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kei</namePart>
<namePart type="family">Wakabayashi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-09</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 14th International Conference on Finite-State Methods and Natural Language Processing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Heiko</namePart>
<namePart type="family">Vogler</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Andreas</namePart>
<namePart type="family">Maletti</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Dresden, Germany</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Modeling sequence data using probabilistic finite state machines (PFSMs) is a technique that analyzes the underlying dynamics in sequences of symbols. Hidden semi-Markov models (HSMMs) and hierarchical hidden Markov models (HHMMs) are PFSMs that have been successfully applied to a wide variety of applications by extending HMMs to make the extracted patterns easier to interpret. However, these models are independently developed with their own training algorithm, so that we cannot combine multiple kinds of structures to build a PFSM for a specific application. In this paper, we prove that silent hidden Markov models (silent HMMs) are flexible models that have more expressive power than HSMMs and HHMMs. Silent HMMs are HMMs that contain silent states, which do not emit any observations. We show that we can obtain silent HMM equivalent to given HSMMs and HHMMs. We believe that these results form a firm foundation to use silent HMMs as a unified representation for PFSM modeling.</abstract>
<identifier type="citekey">wakabayashi-2019-silent</identifier>
<identifier type="doi">10.18653/v1/W19-3113</identifier>
<location>
<url>https://aclanthology.org/W19-3113</url>
</location>
<part>
<date>2019-09</date>
<extent unit="page">
<start>98</start>
<end>107</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Silent HMMs: Generalized Representation of Hidden Semi-Markov Models and Hierarchical HMMs
%A Wakabayashi, Kei
%Y Vogler, Heiko
%Y Maletti, Andreas
%S Proceedings of the 14th International Conference on Finite-State Methods and Natural Language Processing
%D 2019
%8 September
%I Association for Computational Linguistics
%C Dresden, Germany
%F wakabayashi-2019-silent
%X Modeling sequence data using probabilistic finite state machines (PFSMs) is a technique that analyzes the underlying dynamics in sequences of symbols. Hidden semi-Markov models (HSMMs) and hierarchical hidden Markov models (HHMMs) are PFSMs that have been successfully applied to a wide variety of applications by extending HMMs to make the extracted patterns easier to interpret. However, these models are independently developed with their own training algorithm, so that we cannot combine multiple kinds of structures to build a PFSM for a specific application. In this paper, we prove that silent hidden Markov models (silent HMMs) are flexible models that have more expressive power than HSMMs and HHMMs. Silent HMMs are HMMs that contain silent states, which do not emit any observations. We show that we can obtain silent HMM equivalent to given HSMMs and HHMMs. We believe that these results form a firm foundation to use silent HMMs as a unified representation for PFSM modeling.
%R 10.18653/v1/W19-3113
%U https://aclanthology.org/W19-3113
%U https://doi.org/10.18653/v1/W19-3113
%P 98-107
Markdown (Informal)
[Silent HMMs: Generalized Representation of Hidden Semi-Markov Models and Hierarchical HMMs](https://aclanthology.org/W19-3113) (Wakabayashi, FSMNLP 2019)
ACL