@inproceedings{aced80eaa78e4c42bcea6db09a0a9c38,
title = "A {Joint-BERT} Method for Knowledge Base Question Answering",
abstract = "This paper proposes a Joint-BERT method to solve the knowledge base question answering (KBQA) task involved a single fact. It divides it into two subtasks, topic entity recognition and relation detection. For the entity recognition subtask, instead of treating it as a sequence labeling task, a simpler approach of applying a pointer network on the BERT encoder is used to predict the start and end positions of the topic entity. The subtask of relation detection, sharing the BERT encoder, computes candidate predicates rankings from both local and global matching perspectives, and finally trains the two tasks jointly in a multi-task learning framework, so that the two tasks benefit from each other. Experiments show that: Joint-BERT model achieves competitive results on the SimpleQuestions benchmark.",
keywords = "BERT, KBQA, Multi-task Learning, Pointer Network",
author = "Zhang, Tianyu and Chen, Zhiyun",
note = "Publisher Copyright: {\textcopyright} 2022 ACM.; 5th International Conference on Machine Learning and Natural Language Processing, MLNLP 2022 ; Conference date: 23-12-2022 Through 25-12-2022",
year = "2022",
month = dec,
day = "23",
doi = "10.1145/3578741.3578749",
language = "English",
series = "ACM International Conference Proceeding Series",
publisher = "Association for Computing Machinery",
pages = "35--40",
booktitle = "{MLNLP} 2022 - 2022 5th International Conference on Machine Learning and Natural Language Processing, Conference Proceedings",
address = "New York, NY, USA",
}