{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"LLM4BioHypoGen","owner":"TsinghuaC3I","isFork":false,"description":"Accepted to COLM 2024, \"Large Language Models as Biomedical Hypothesis Generators: A Comprehensive Evaluation\"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T09:27:12.352Z"}},{"type":"Public","name":"UltraMedical","owner":"TsinghuaC3I","isFork":false,"description":"UltraMedical: Building Specialized Generalists in Biomedicine","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":41,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-12T12:45:15.973Z"}},{"type":"Public","name":"FS-GEN","owner":"TsinghuaC3I","isFork":false,"description":"Fast and Slow Generating: An Empirical Study on Large and Small Language Models Collaborative Decoding. https://arxiv.org/abs/2406.12295","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":6,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-19T00:40:22.547Z"}},{"type":"Public","name":"Intuitive-Fine-Tuning","owner":"TsinghuaC3I","isFork":false,"description":"Intuitive Fine-Tuning: Towards Simplifying Alignment into a Single Process","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":2,"starsCount":8,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-04T06:16:28.050Z"}},{"type":"Public","name":"CoGenesis","owner":"TsinghuaC3I","isFork":false,"description":"CoGenesis: A Framework Collaborating Large and Small Language Models for Secure Context-Aware Instruction Following. ACL@2024 Main","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T12:34:40.864Z"}},{"type":"Public","name":".github","owner":"TsinghuaC3I","isFork":false,"description":"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-27T08:12:38.729Z"}},{"type":"Public","name":"SoRA","owner":"TsinghuaC3I","isFork":false,"description":"The source code of the EMNLP 2023 main conference paper: Sparse Low-rank Adaptation of Pre-trained Language Models.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":6,"starsCount":57,"forksCount":8,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-05T03:47:15.388Z"}},{"type":"Public","name":"CRaSh","owner":"TsinghuaC3I","isFork":false,"description":"The source code of the EMNLP 2023 main conference paper: \"CRaSh: Clustering, Removing, and Sharing Enhance Fine-tuning without Full Large Language Model.\"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":1,"starsCount":5,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-17T04:12:35.608Z"}}],"repositoryCount":8,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"TsinghuaC3I repositories"}