{
    "author": null,
    "date_published": "2021-02-02T00:00:00.000Z",
    "dek": null,
    "direction": "ltr",
    "domain": "arxiv.org",
    "excerpt": "We study empirical scaling laws for transfer learning between distributions in an unsupervised, fine-tuning setting. When we train increasingly large neural networks from-scratch on a fixed-size&hellip;",
    "lead_image_url": "https://static.arxiv.org/icons/twitter/arxiv-logo-twitter-square.png",
    "next_page_url": null,
    "rendered_pages": 1,
    "title": "Scaling Laws for Transfer",
    "total_pages": 1,
    "url": "https://arxiv.org/abs/2102.01293v1",
    "word_count": 201
}