{
    "author": null,
    "date_published": null,
    "dek": null,
    "direction": "ltr",
    "domain": "kylrth.com",
    "excerpt": "I also referred to this implementation to understand some of the details. This is the paper describing the Transformer, a sequence-to-sequence model based entirely on attention. I think it\u2019s best\u2026",
    "lead_image_url": null,
    "next_page_url": null,
    "rendered_pages": 1,
    "title": "Attention is all you need",
    "total_pages": 1,
    "url": "https://kylrth.com/paper/attention-all-you-need/",
    "word_count": 1
}