@inproceedings{bee668e79bb34911b19b5f059f164969,
  title     = {Dependent Multilevel Interaction Network for Natural Language Inference},
  abstract  = {Neural networks have attracted great attention for natural language inference in recent years. Interactions between the premise and the hypothesis have been proved to be effective in improving the representations. Existing methods mainly focused on a single interaction, while multiple interactions have not been well studied. In this paper, we propose a dependent multilevel interaction (DMI) Network which models multiple interactions between the premise and the hypothesis to boost the performance of natural language inference. In specific, a single-interaction unit (SIU) structure with a novel combining attention mechanism is presented to capture features in an interaction. Then, we cascade a serial of SIUs in a multilevel interaction layer to obtain more comprehensive features. Experiments on two benchmark datasets, namely SciTail and SNLI, show the effectiveness of our proposed model. Our model outperforms the state-of-the-art approaches on the SciTail dataset without using any external resources. For the SNLI dataset, our model also achieves competitive results.},
  keywords  = {Attention mechanism, Deep learning, Sentence interaction},
  author    = {Li, Yun and Yang, Yan and Deng, Yong and Hu, {Qinmin Vivian} and Chen, Chengcai and He, Liang and Yu, Zhou},
  editor    = {Tetko, {Igor V.} and Karpov, Pavel and Theis, Fabian and Kurkov{\'a}, Vera},
  booktitle = {Artificial Neural Networks and Machine Learning -- {ICANN} 2019},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  address   = {Germany},
  pages     = {9--21},
  year      = {2019},
  doi       = {10.1007/978-3-030-30490-4_2},
  isbn      = {9783030304898},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks: Workshop and Special Sessions, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
}