{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"llama3","owner":"meta-llama","isFork":false,"description":"The official Meta Llama 3 GitHub site","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":20,"issueCount":100,"starsCount":21755,"forksCount":2210,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,22,4,70,10,1,1,5,6,0,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-07T03:08:30.894Z"}},{"type":"Public","name":"llama-recipes","owner":"meta-llama","isFork":false,"description":"Scripts for fine-tuning Meta Llama3 with composable FSDP & PEFT methods to cover single/multi-node GPUs. Supports default & custom datasets for applications such as summarization and Q&A. Supporting a number of candid inference solutions such as HF TGI, VLLM for local or cloud deployment. Demo apps to showcase Meta Llama3 for WhatsApp & Messenger.","allTopics":["python","machine-learning","ai","llama","finetuning","llm","langchain","vllm","llama2","pytorch"],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":45,"issueCount":81,"starsCount":10014,"forksCount":1415,"license":null,"participation":[0,0,0,0,0,30,44,48,31,11,3,56,54,21,19,17,24,26,21,15,34,12,12,8,6,15,32,41,15,9,5,11,14,4,43,21,2,17,11,11,12,7,2,12,57,46,52,26,66,19,8,8],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-06T23:43:41.129Z"}},{"type":"Public","name":"PurpleLlama","owner":"meta-llama","isFork":false,"description":"Set of tools to assess and improve LLM security.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":2083,"forksCount":335,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,6,0,0,0,2,3,1,1,3,1,4,4,7,8,0,17,18,16,44,6,7,4,1,2,0,3],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-06T22:53:08.512Z"}},{"type":"Public","name":"codellama","owner":"meta-llama","isFork":false,"description":"Inference code for CodeLlama models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":7,"issueCount":89,"starsCount":15335,"forksCount":1767,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,1,5,1,9,1,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-21T21:14:23.157Z"}},{"type":"Public","name":"llama","owner":"meta-llama","isFork":false,"description":"Inference code for Llama models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":49,"issueCount":310,"starsCount":53807,"forksCount":9269,"license":"Other","participation":[0,0,0,0,0,15,5,0,7,2,1,17,13,16,8,3,1,4,5,0,6,3,5,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,4,2,4,1,0,3,0,0,0,0,2,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-15T03:49:42.993Z"}}],"repositoryCount":5,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}