@inproceedings{Dai_2019,
  title     = {{Transformer-XL}: Attentive Language Models beyond a Fixed-Length Context},
  author    = {Dai, Zihang and Yang, Zhilin and Yang, Yiming and Carbonell, Jaime and Le, Quoc and Salakhutdinov, Ruslan},
  booktitle = {Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics},
  publisher = {Association for Computational Linguistics},
  year      = {2019},
  pages     = {2978--2988},
  doi       = {10.18653/v1/p19-1285},
}