{
    "author": null,
    "date_published": null,
    "dek": null,
    "direction": "ltr",
    "domain": "kylrth.com",
    "excerpt": "Posted on 2020-08-05 at 12:37:42 UTC-0700 I also referred to this implementation to understand some of the details. This is the paper describing the Transformer, a sequence-to-sequence model based…",
    "lead_image_url": null,
    "next_page_url": null,
    "rendered_pages": 1,
    "title": "attention",
    "total_pages": 1,
    "url": "https://kylrth.com/tags/attention/",
    "word_count": 1
}