{"payload":{"pageCount":21,"repositories":[{"type":"Public","name":"GlotLID-language-identification-model","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Language Identification tool for more than 1600 languages (EMNLP 2023).","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":7,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-30T16:49:34.765Z"}},{"type":"Public","name":"streaming-llm-Streaming-Language-Models-with-Attention-Sinks","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Efficient Streaming Language Models with Attention Sinks","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":357,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-05T15:14:13.853Z"}},{"type":"Public","name":"ToRA-LLM-Agents-designed-to-solve-challenging-mathematical-reasoning-problems","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"ToRA is a series of Tool-integrated Reasoning LLM Agents designed to solve challenging mathematical reasoning problems by interacting with tools.","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":66,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-05T08:26:01.987Z"}},{"type":"Public","name":"graphologue-Exploring-LLM-Responses-with-Interactive-Diagrams","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Use GPT-4 to stream diagrams, instead of tokens, in real-time! (UIST 2023 Paper)","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":32,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-21T18:05:04.120Z"}},{"type":"Public","name":"LLM-Agent-Paper-List","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"The paper list of the 86-page paper \"The Rise and Potential of Large Language Model Based Agents: A Survey\" by Zhiheng Xi et al.","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":337,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-19T13:43:29.372Z"}},{"type":"Public","name":"agents-Framework-for-Autonomous-Language-Agents","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"An Open-source Framework for Autonomous Language Agents","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":359,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-15T16:59:42.519Z"}},{"type":"Public","name":"LLaMA-Adapter-Fine-tuning-of-LLaMA","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Fine-tuning LLaMA to follow Instructions within 1 Hour and 1.2M Parameters","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":362,"license":"GNU General Public License v3.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-15T08:15:28.454Z"}},{"type":"Public","name":"vllm-Efficient-Memory-Management-for-Large-Language-Model","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"A high-throughput and memory-efficient inference and serving engine for LLMs","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":3001,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-14T09:43:43.945Z"}},{"type":"Public","name":"DoLa-Decoding-by-Contrasting-Layers-Large-Language-Models","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Official implementation for the paper \"DoLa: Decoding by Contrasting Layers Improves Factuality in Large Language Models\"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":45,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-14T03:40:00.306Z"}},{"type":"Public","name":"OmniQuant-Omnidirectionally-Calibrated-Quantization-for-Large-Language-Models","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"OmniQuant is a simple and powerful quantization technique for LLMs. ","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":48,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-13T06:16:05.222Z"}},{"type":"Public","name":"Point-Bind_Point-LLM-Aligning-3D-with-Multi-modality","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Align 3D Point Cloud with Multi-modalities for Large Language Models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":31,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-08T18:07:34.470Z"}},{"type":"Public","name":"modelscope-agent-Building-Customizable-Agent-System-with-LLMs","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"ModelScope-Agent: An agent framework connecting models in ModelScope with the world","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":241,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-07T03:38:04.382Z"}},{"type":"Public","name":"yarn-Efficient-Context-Window-Extension-of-Large-Language-Models","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"YaRN: Efficient Context Window Extension of Large Language Models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":110,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-04T15:38:16.057Z"}},{"type":"Public","name":"belebele-Massively-Multilingual-NLU-Evaluation","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Repo for the Belebele dataset, a massively multilingual reading comprehension dataset.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":21,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-01T12:44:30.729Z"}},{"type":"Public","name":"DenseDiffusion-Training-free-Text-to-Image-Generation","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Official Pytorch Implementation of DenseDiffusion (ICCV 2023) ","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":30,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-30T12:47:41.325Z"}},{"type":"Public","name":"Qwen-VL-multimodal-version-of-the-large-model-series","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"The official repo of Qwen-VL chat & pretrained large vision language model proposed by Alibaba Cloud.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":323,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-28T06:28:15.992Z"}},{"type":"Public","name":"OmniQuant-quantization-algorithm-for-LLMs","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"An efficient, accurate, and omnibearing quantization algorithm for LLMs, encompassing both weight-only quantization (W4A16/W3A16/W2A16) and weight-activation quantization (W6A6, W4A4)","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":48,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-28T05:29:07.431Z"}},{"type":"Public","name":"Lemur-Pretrained-Large-Language-Models-Balancing-Text-and-Code-Capabilities","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Lemur: The State-of-the-art Open Pretrained Large Language Models Balancing Text and Code Capabilities","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":33,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-25T08:44:07.073Z"}},{"type":"Public","name":"IT3D-text-to-3D-Generation-with-Explicit-View-Synthesis","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":12,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-25T08:31:35.838Z"}},{"type":"Public","name":"codellama-family-of-large-language-models-for-code-based-on-Llama-2","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Inference code for CodeLlama models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":1779,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-24T16:20:05.636Z"}},{"type":"Public","name":"WizardLM-Large-Pre-Trained-Language-Models-to-Follow-Complex-Instructions","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Family of instruction-following LLMs powered by Evol-Instruct: WizardLM, WizardCoder and WizardMath","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":706,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-24T09:41:05.424Z"}},{"type":"Public","name":"EasyEdit-Knowledge-Editing-Framework-for-Large-Language-Models.","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"An Easy-to-use Knowledge Editing Framework for LLMs.","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":191,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-24T07:09:35.634Z"}},{"type":"Public","name":"FlexFlow-Low-Latency-High-Performance-LLM-Serving","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"A distributed deep learning framework.","allTopics":[],"primaryLanguage":{"name":"C++","color":"#f34b7d"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":218,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-24T02:09:46.541Z"}},{"type":"Public","name":"tifa-Text-to-Image-Faithfulness-Evaluation-with-Question-Answering","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"TIFA: Accurate and Interpretable Text-to-Image Faithfulness Evaluation with Question Answering","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":7,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-23T01:33:57.450Z"}},{"type":"Public","name":"cursive-py-framework-for-interacting-with-LLMs.","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"✦ The intuitive python LLM framework","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":19,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-22T15:46:44.246Z"}},{"type":"Public","name":"Chat-Haruhi-Suzumiya-Anime-Character-in-Reality-via-Large-Language-Model","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Chat凉宫春日, 由李鲁鲁, 冷子昂等同学开发的模仿二次元对话的聊天机器人。","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":150,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-21T15:34:18.315Z"}},{"type":"Public","name":"discriminative_class_token-Text-to-Image-Diffusion-Models","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":3,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-21T08:54:20.625Z"}},{"type":"Public","name":"QuIP-Quantization-with-Incoherence-Processing","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"Code for paper: \"QuIP: 2-Bit Quantization of Large Language Models With Guarantees\"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":28,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-21T03:16:58.933Z"}},{"type":"Public","name":"IncognitoPilot-Large-Language-Model-LLM-with-a-Python-interpreter","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"An AI code interpreter for sensitive data, powered by GPT-4 or Llama 2.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":44,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-20T15:37:53.564Z"}},{"type":"Public","name":"BOLAA-LLM-Augmented-Autonomous-Agents-and-the-Innovative-BOLAA-Strategy","owner":"AI-Natural-Language-Processing-Lab","isFork":true,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":16,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-16T21:03:56.924Z"}}],"repositoryCount":628,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}