{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":792957876,"defaultBranch":"main","name":"llama-recipes","ownerLogin":"lenovo","currentUserCanPush":false,"isFork":true,"isEmpty":false,"createdAt":"2024-04-28T03:03:17.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/13356730?v=4","public":true,"private":false,"isOrgOwned":true},"refInfo":{"name":"","listCacheKey":"v0:1718790962.0","currentOid":""},"activityList":{"items":[{"before":null,"after":"54d285956653b4ff495bd21dc85957cf58363dd1","ref":"refs/heads/2024.6","pushedAt":"2024-06-19T09:56:02.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"enuff-tech","name":"enuff","path":"/enuff-tech","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/42442669?s=80&v=4"},"commit":{"message":"FMBench: benchmarking Llama models on AWS (#452)","shortMessageHtmlLink":"FMBench: benchmarking Llama models on AWS (meta-llama#452)"}},{"before":"54d285956653b4ff495bd21dc85957cf58363dd1","after":null,"ref":"refs/tags/2024.7","pushedAt":"2024-06-19T09:54:33.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"enuff-tech","name":"enuff","path":"/enuff-tech","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/42442669?s=80&v=4"}},{"before":"98cacbb4f1e093c40001e37964662e9c259253c4","after":null,"ref":"refs/tags/2.24.6.1","pushedAt":"2024-06-19T09:46:10.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"enuff-tech","name":"enuff","path":"/enuff-tech","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/42442669?s=80&v=4"}},{"before":"802b8378e9c16aae666769b11809dc18b72f9982","after":null,"ref":"refs/tags/2024.4","pushedAt":"2024-05-06T03:03:59.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"CarrotXin","name":"hexin","path":"/CarrotXin","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7135358?s=80&v=4"}},{"before":"54d285956653b4ff495bd21dc85957cf58363dd1","after":null,"ref":"refs/heads/https/github.com/meta-llama/llama-recipes/tree/37c8f722116493e69ea99420b3d73287905a46d0","pushedAt":"2024-05-06T02:44:23.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"CarrotXin","name":"hexin","path":"/CarrotXin","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7135358?s=80&v=4"}},{"before":null,"after":"54d285956653b4ff495bd21dc85957cf58363dd1","ref":"refs/heads/https/github.com/meta-llama/llama-recipes/tree/37c8f722116493e69ea99420b3d73287905a46d0","pushedAt":"2024-05-06T02:43:21.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"CarrotXin","name":"hexin","path":"/CarrotXin","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7135358?s=80&v=4"},"commit":{"message":"FMBench: benchmarking Llama models on AWS (#452)","shortMessageHtmlLink":"FMBench: benchmarking Llama models on AWS (meta-llama#452)"}}],"hasNextPage":false,"hasPreviousPage":false,"activityType":"all","actor":null,"timePeriod":"all","sort":"DESC","perPage":30,"cursor":"djE6ks8AAAAEaWHosQA","startCursor":null,"endCursor":null}},"title":"Activity ยท lenovo/llama-recipes"}