@inproceedings{han-EtAl:2018:Demos,
  author    = {Han, Xu and Cao, Shulin and Lv, Xin and Lin, Yankai and Liu, Zhiyuan and Sun, Maosong and Li, Juanzi},
  title     = {{OpenKE}: An Open Toolkit for Knowledge Embedding},
  booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},
  month     = nov,
  year      = {2018},
  address   = {Brussels, Belgium},
  publisher = {Association for Computational Linguistics},
  pages     = {139--144},
  doi       = {10.18653/v1/D18-2024},
  url       = {https://aclanthology.org/D18-2024},
  abstract  = {We release an open toolkit for knowledge embedding (OpenKE), which provides a unified framework and various fundamental models to embed knowledge graphs into a continuous low-dimensional space. OpenKE prioritizes operational efficiency to support quick model validation and large-scale knowledge representation learning. Meanwhile, OpenKE maintains sufficient modularity and extensibility to easily incorporate new models into the framework. Besides the toolkit, the embeddings of some existing large-scale knowledge graphs pre-trained by OpenKE are also available, which can be directly applied for many applications including information retrieval, personalized recommendation and question answering. The toolkit, documentation, and pre-trained embeddings are all released on http://openke.thunlp.org/.},
}

