{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"arcee-python","owner":"arcee-ai","isFork":false,"description":"The Arcee client for executing domain-adpated language model routines https://pypi.org/project/arcee-py/","allTopics":["ai","llm","llmops","llm-training","llm-inference"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":3,"issueCount":2,"starsCount":14,"forksCount":3,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-02T18:12:09.408Z"}},{"type":"Public","name":"Megatron-LM","owner":"arcee-ai","isFork":true,"description":"domain adapted MOE training","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":2,"issueCount":0,"starsCount":0,"forksCount":2089,"license":"Other","participation":[60,24,50,17,36,46,70,50,41,60,35,35,62,111,103,135,84,26,43,76,34,33,41,50,22,5,32,35,49,66,43,18,9,17,23,30,54,88,45,61,60,52,14,41,17,24,19,28,31,26,0,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-01T18:47:45.546Z"}},{"type":"Public","name":"mergekit","owner":"arcee-ai","isFork":false,"description":"Tools for merging pretrained large language models.","allTopics":["llama","llm","model-merging"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":14,"issueCount":127,"starsCount":4017,"forksCount":346,"license":"GNU Lesser General Public License v3.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-29T03:51:38.469Z"}},{"type":"Public","name":"DALM","owner":"arcee-ai","isFork":false,"description":"Domain Adapted Language Modeling Toolkit - E2E RAG","allTopics":["retrieval","language-model","llm","retrieval-augmented-generation"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":2,"issueCount":6,"starsCount":277,"forksCount":33,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-11T21:43:45.606Z"}},{"type":"Public","name":"Megatron-LM-Llama-70B","owner":"arcee-ai","isFork":true,"description":"Ongoing research training transformer models at scale","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":2089,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-29T00:42:16.873Z"}},{"type":"Public","name":"Pai-Megatron-Patch-Llama3-70B","owner":"arcee-ai","isFork":true,"description":"The official repo of Pai-Megatron-Patch for LLM & VLM large scale training developed by Alibaba Cloud.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":73,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,8,9,9,2,0,9,11,6,7,7,1,6,15,10,4,0,0,6,2,1,2,2,5,0,0,5,4,4,1,3,1,0,6,17,8,2,1,14,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T23:54:47.892Z"}},{"type":"Public","name":"Pai-Megatron-Patch","owner":"arcee-ai","isFork":true,"description":"The official repo of Pai-Megatron-Patch for LLM & VLM large scale training developed by Alibaba Cloud.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":73,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-16T00:09:11.255Z"}},{"type":"Public","name":"Model-Tools","owner":"arcee-ai","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":4,"forksCount":0,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,6,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-06T18:23:04.375Z"}},{"type":"Public","name":"PruneMe","owner":"arcee-ai","isFork":false,"description":"Automated Identification of Redundant Layer Blocks for Pruning in Large Language Models","allTopics":["pruning","merging","pruning-algorithms","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":4,"starsCount":144,"forksCount":18,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,2,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-23T00:03:24.634Z"}},{"type":"Public","name":"llm-autoeval","owner":"arcee-ai","isFork":true,"description":"Automatically evaluate your LLMs in Google Colab","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":77,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,5,0,11,0,0,0,2,54,0,0,0,0,4,13,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-15T15:42:29.038Z"}},{"type":"Public","name":"arcee-trainium-recipes","owner":"arcee-ai","isFork":false,"description":"The repository contains all the set-up required to execute trainium training jobs. ","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":1,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-22T11:21:38.541Z"}},{"type":"Public","name":"documentation","owner":"arcee-ai","isFork":false,"description":"Arcee docs repository","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-15T22:39:18.014Z"}},{"type":"Public","name":"notebooks","owner":"arcee-ai","isFork":false,"description":"Demo Notebooks showcasing Arcee and DALM","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":4,"forksCount":0,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-04T19:59:14.211Z"}},{"type":"Public","name":"code-llama-rag","owner":"arcee-ai","isFork":false,"description":"The first RAG retriever tuned particularly for code and code Llama","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":10,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-03T02:00:03.336Z"}},{"type":"Public","name":"Stable-Diffusion-text-to-image","owner":"arcee-ai","isFork":false,"description":"A simple stable diffusion text-to-image notebook","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-03-03T15:09:58.391Z"}}],"repositoryCount":15,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"arcee-ai repositories"}