@comment{Cleaned auto-exported entry: bare DOI (no escaped underscore), mis-localized
  language/address values translated to English, editor name-brace escapes repaired,
  names normalized to the unambiguous "Last, First" form, braces instead of quotes.}
@inproceedings{763f8f6b61ac4e7eb3fa6571e8fd7f43,
  title     = {Enhancing the recurrent neural networks with positional gates for sentence representation},
  author    = {Song, Yang and Hu, Wenxin and Chen, Qin and Hu, Qinmin and He, Liang},
  editor    = {Cheng, Long and Leung, {Andrew Chi Sing} and Ozawa, Seiichi},
  booktitle = {Neural Information Processing - 25th International Conference, ICONIP 2018, Proceedings},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Verlag},
  address   = {Germany},
  year      = {2018},
  pages     = {511--521},
  doi       = {10.1007/978-3-030-04167-0_46},
  isbn      = {9783030041663},
  language  = {English},
  keywords  = {Attention, Gate, Position, Recurrent neural network},
  abstract  = {The recurrent neural networks (RNN) with attention mechanism have shown good performance for answer selection in recent years. Most previous attention mechanisms focus on generating the attentive weights after obtaining all the hidden states, while the contextual information from the other sentence is not well studied during the internal hidden state generation. In this paper, we propose a position gated RNN (PG-RNN) model, which merges the positional contextual information of the question words for the inner hidden state generation. Specifically, we first design a positional interaction monitor to detect and measure the positional influence of question word within answer sentence. Then we present a positional gating mechanism and embed it into RNN to automatically absorb the positional contextual information for the hidden state update. Experiments on two benchmark datasets, namely TREC-QA and WikiQA, show the great advantages of our proposed model. In particular, we achieve the new state-of-the-art performance on TREC-QA and WikiQA.},
  note      = {Publisher Copyright: {\textcopyright} 2018, Springer Nature Switzerland AG.; 25th International Conference on Neural Information Processing, ICONIP 2018 ; Conference date: 13-12-2018 Through 16-12-2018},
}