{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":665758053,"defaultBranch":"main","name":"attorch","ownerLogin":"BobMcDear","currentUserCanPush":false,"isFork":false,"isEmpty":false,"createdAt":"2023-07-13T00:06:15.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/62210058?v=4","public":true,"private":false,"isOrgOwned":false},"refInfo":{"name":"","listCacheKey":"v0:1691026707.0","currentOid":""},"activityList":{"items":[{"before":"55bbb98118cdeb0a787a2da493a92fb079175b14","after":"e00701feed3c518a3d065fdb2c53678c9f8206dd","ref":"refs/heads/main","pushedAt":"2024-05-30T13:02:00.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Add gated linear unit to docs","shortMessageHtmlLink":"Docs: Add gated linear unit to docs"}},{"before":"e9d72e70bf481f6dda05f4c9645a3be766a1b936","after":"55bbb98118cdeb0a787a2da493a92fb079175b14","ref":"refs/heads/main","pushedAt":"2024-05-29T12:28:53.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for gated linear unit","shortMessageHtmlLink":"Test: Add tests for gated linear unit"}},{"before":"3ab0ddb423dd9cbf04555324b8610ed67cf78227","after":"e9d72e70bf481f6dda05f4c9645a3be766a1b936","ref":"refs/heads/main","pushedAt":"2024-05-28T12:11:37.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add pure math ops (linear accumulation)","shortMessageHtmlLink":"Feat: Add pure math ops (linear 
accumulation)"}},{"before":"1a3a517df1f08df321c63f02dca13bde043f66e9","after":"3ab0ddb423dd9cbf04555324b8610ed67cf78227","ref":"refs/heads/main","pushedAt":"2024-05-13T11:56:08.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add gated linear unit layer","shortMessageHtmlLink":"Feat: Add gated linear unit layer"}},{"before":"4f8258c7a8be0a5480e92d27fe0c119d3cbd5d08","after":"1a3a517df1f08df321c63f02dca13bde043f66e9","ref":"refs/heads/main","pushedAt":"2024-05-10T13:00:34.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Fix: Fix layer norm autodiff not returning enough grads","shortMessageHtmlLink":"Fix: Fix layer norm autodiff not returning enough grads"}},{"before":"22590b6bd0e0aa88ee4a491545bb13497eca7116","after":"4f8258c7a8be0a5480e92d27fe0c119d3cbd5d08","ref":"refs/heads/main","pushedAt":"2024-05-08T11:58:37.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add gated linear unit kernels","shortMessageHtmlLink":"Feat: Add gated linear unit kernels"}},{"before":"6a755cc47e91a29c07e854153628955c292a7e18","after":"22590b6bd0e0aa88ee4a491545bb13497eca7116","ref":"refs/heads/main","pushedAt":"2024-05-06T15:12:41.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Add fused dropout support for activations to docs","shortMessageHtmlLink":"Docs: Add fused dropout support for activations to 
docs"}},{"before":"15c84c734131be9882956693e03ab5b4f9ee20ad","after":"6a755cc47e91a29c07e854153628955c292a7e18","ref":"refs/heads/main","pushedAt":"2024-05-02T11:58:07.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for activation layers with fused dropout","shortMessageHtmlLink":"Test: Add tests for activation layers with fused dropout"}},{"before":"a231aac6496bb5dbb139d82525c81d9f5fb3ab8d","after":"15c84c734131be9882956693e03ab5b4f9ee20ad","ref":"refs/heads/main","pushedAt":"2024-04-29T12:09:34.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add fused dropout support to activation layers","shortMessageHtmlLink":"Feat: Add fused dropout support to activation layers"}},{"before":"1547a7dc97e765a7a31a45142913fccabf267215","after":"a231aac6496bb5dbb139d82525c81d9f5fb3ab8d","ref":"refs/heads/main","pushedAt":"2024-04-28T12:29:23.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Fix: Fix dropout activation kernel calls in BN & linear","shortMessageHtmlLink":"Fix: Fix dropout activation kernel calls in BN & linear"}},{"before":"25714e23019288e9652decaa341c9a49021443b9","after":"1547a7dc97e765a7a31a45142913fccabf267215","ref":"refs/heads/main","pushedAt":"2024-04-26T12:10:21.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add fused dropout support to activation 
kernels","shortMessageHtmlLink":"Feat: Add fused dropout support to activation kernels"}},{"before":"7ff1844db467ba0e53397bd303882c09420012cf","after":"25714e23019288e9652decaa341c9a49021443b9","ref":"refs/heads/main","pushedAt":"2024-04-25T11:51:37.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Refactor: Refactor dropout forward & backward into pure functions","shortMessageHtmlLink":"Refactor: Refactor dropout forward & backward into pure functions"}},{"before":"be83f9a8157be7d39dc9d2302c0cf3c5ec22aa2a","after":"7ff1844db467ba0e53397bd303882c09420012cf","ref":"refs/heads/main","pushedAt":"2024-04-22T11:28:11.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Add multi-headed attention layer to docs","shortMessageHtmlLink":"Docs: Add multi-headed attention layer to docs"}},{"before":"a802c9f6655f920264e6ac533efa703b24e285e2","after":"be83f9a8157be7d39dc9d2302c0cf3c5ec22aa2a","ref":"refs/heads/main","pushedAt":"2024-04-18T12:02:20.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add option to disable FP32 autocasting for layer norm","shortMessageHtmlLink":"Feat: Add option to disable FP32 autocasting for layer norm"}},{"before":"297f7c4154ce94d61e28417ddeb22db988c4483e","after":"a802c9f6655f920264e6ac533efa703b24e285e2","ref":"refs/heads/main","pushedAt":"2024-04-15T12:43:41.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna 
Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for multi-headed attention layer","shortMessageHtmlLink":"Test: Add tests for multi-headed attention layer"}},{"before":"679391d45820b685cdb8a03578bfcf69598474e2","after":"297f7c4154ce94d61e28417ddeb22db988c4483e","ref":"refs/heads/main","pushedAt":"2024-04-12T12:47:29.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add multi-headed attention layer","shortMessageHtmlLink":"Feat: Add multi-headed attention layer"}},{"before":"fed2020534bc7ced8fd06db1e558fc38a0059e9e","after":"679391d45820b685cdb8a03578bfcf69598474e2","ref":"refs/heads/main","pushedAt":"2024-04-01T16:13:17.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Add Imagenette training example","shortMessageHtmlLink":"Docs: Add Imagenette training example"}},{"before":"3fd5a527319e95c33d7be429394887e8eba9170e","after":"fed2020534bc7ced8fd06db1e558fc38a0059e9e","ref":"refs/heads/main","pushedAt":"2024-03-29T12:18:48.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Improvement: Remove dtype and device constraints for parameters","shortMessageHtmlLink":"Improvement: Remove dtype and device constraints for 
parameters"}},{"before":"77174c9dae740b9d1027796253dbb97125f8a29a","after":"3fd5a527319e95c33d7be429394887e8eba9170e","ref":"refs/heads/main","pushedAt":"2024-03-27T13:00:10.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Better organize README and add table of contents","shortMessageHtmlLink":"Docs: Better organize README and add table of contents"}},{"before":"6d824b8491ad6cdd3a1804d908fae383dd428bee","after":"77174c9dae740b9d1027796253dbb97125f8a29a","ref":"refs/heads/main","pushedAt":"2024-03-26T11:56:35.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Docs: Improve README","shortMessageHtmlLink":"Docs: Improve README"}},{"before":"fa70e2c6fd835a397c9cc721c23b2ff51fa5f8c9","after":"6d824b8491ad6cdd3a1804d908fae383dd428bee","ref":"refs/heads/main","pushedAt":"2024-03-25T12:02:07.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for AMP p-norm induced losses","shortMessageHtmlLink":"Test: Add tests for AMP p-norm induced losses"}},{"before":"5e63dd2ec245990a1764a08b5ab97b47b08193da","after":"fa70e2c6fd835a397c9cc721c23b2ff51fa5f8c9","ref":"refs/heads/main","pushedAt":"2024-03-23T12:54:24.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for AMP NLL loss","shortMessageHtmlLink":"Test: Add tests for AMP NLL 
loss"}},{"before":"32d1d3f61a6ee0bde8bc9ea9752e575c5e4b147f","after":"5e63dd2ec245990a1764a08b5ab97b47b08193da","ref":"refs/heads/main","pushedAt":"2024-03-22T12:07:01.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for AMP softmax","shortMessageHtmlLink":"Test: Add tests for AMP softmax"}},{"before":"97d5426a6b02d8e246ac360b9c833897d477d8be","after":"32d1d3f61a6ee0bde8bc9ea9752e575c5e4b147f","ref":"refs/heads/main","pushedAt":"2024-03-21T23:21:05.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Style: Remove extra blank line","shortMessageHtmlLink":"Style: Remove extra blank line"}},{"before":"499fddac8d56e35fc9334c0aa0ed54b5380dbb1f","after":"97d5426a6b02d8e246ac360b9c833897d477d8be","ref":"refs/heads/main","pushedAt":"2024-03-19T12:01:44.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add tests for AMP linear layer","shortMessageHtmlLink":"Test: Add tests for AMP linear layer"}},{"before":"d3117c8ed8ec78625d21ac8b757d74231f228b69","after":"499fddac8d56e35fc9334c0aa0ed54b5380dbb1f","ref":"refs/heads/main","pushedAt":"2024-03-15T12:13:49.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add AMP support for softmax layers","shortMessageHtmlLink":"Feat: Add AMP support for softmax 
layers"}},{"before":"384d818ca2483d0b2a7e4078db61ed444970117a","after":"d3117c8ed8ec78625d21ac8b757d74231f228b69","ref":"refs/heads/main","pushedAt":"2024-03-14T23:17:38.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add AMP support for p-norm losses","shortMessageHtmlLink":"Feat: Add AMP support for p-norm losses"}},{"before":"9024a1eb280755e31ca82b512c578636b121f81f","after":"384d818ca2483d0b2a7e4078db61ed444970117a","ref":"refs/heads/main","pushedAt":"2024-03-11T12:10:13.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Test: Add test for AMP layer normalization","shortMessageHtmlLink":"Test: Add test for AMP layer normalization"}},{"before":"88bcfcaa341a78ffd19026578530d3b0eca7e4b1","after":"9024a1eb280755e31ca82b512c578636b121f81f","ref":"refs/heads/main","pushedAt":"2024-03-10T13:30:26.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Feat: Add AMP support for NLL layer","shortMessageHtmlLink":"Feat: Add AMP support for NLL layer"}},{"before":"384d3013a122eaa1724cf7a36f97d28ecf7d5587","after":"88bcfcaa341a78ffd19026578530d3b0eca7e4b1","ref":"refs/heads/main","pushedAt":"2024-03-08T13:41:11.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"BobMcDear","name":"Borna Ahmadzadeh","path":"/BobMcDear","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/62210058?s=80&v=4"},"commit":{"message":"Fix: Add missing allow_tf32 utility function","shortMessageHtmlLink":"Fix: Add missing allow_tf32 utility 
function"}}],"hasNextPage":true,"hasPreviousPage":false,"activityType":"all","actor":null,"timePeriod":"all","sort":"DESC","perPage":30,"cursor":"djE6ks8AAAAEWAXoBAA","startCursor":null,"endCursor":null}},"title":"Activity · BobMcDear/attorch"}