{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"OutEffHop","owner":"MAGICS-LAB","isFork":false,"description":"[ICML 2024] Outlier-Efficient Hopfield Layers for Large Transformer-Based Models","allTopics":["transformer","outliers","attention","attention-mechanism","outlier-removal","outlier","hopfield-neural-network","ptq","outlier-treatment","modern-hopfield-networks","modern-hopfield-model","icml-2024","softmax-1","quantized-friendly","no-op-outlier"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":5,"forksCount":0,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,3,0,0,0,0,2,0,0,4,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-05T07:55:39.636Z"}},{"type":"Public","name":"UHop","owner":"MAGICS-LAB","isFork":false,"description":"[ICML 2024] Uniform Memory Retrieval with Larger Capacity for Modern Hopfield Models","allTopics":["hopfield-network","associative-memory","hopfield","memory-capacity","network-capacity","modern-hopfield-networks","modern-hopfield-model","sparse-hopfield","hopfield-model","generalized-sparse-hopfield"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,0,0,0,0,0,0,0,1,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T22:17:45.391Z"}},{"type":"Public","name":"DNABERT_2","owner":"MAGICS-LAB","isFork":false,"description":"[ICLR 2024] DNABERT-2: Efficient Foundation Model and Benchmark for Multi-Species Genome","allTopics":["transcription-factors","genome","splice","dna","dataset","promoters","promoter","transcription-factor-binding","language-model","genome-analysis","promoter-analysis","transcription-factor-binding-site","covid","dna-processing","splice-site","dna-training"],"primaryLanguage":{"name":"Shell","color":"#89e051"},"pullRequestCount":3,"issueCount":27,"starsCount":192,"forksCount":44,"license":"Apache License 2.0","participation":[0,1,8,1,0,4,0,0,1,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-28T06:30:13.340Z"}},{"type":"Public","name":"BiSHop","owner":"MAGICS-LAB","isFork":false,"description":"[ICML 2024] BiSHop: Bi-Directional Cellular Learning for Tabular Data with Generalized Sparse Modern Hopfield Model","allTopics":["hopfield-network","tabular","hopfield","tabular-methods","tabular-model","modern-hopfield-networks","modern-hopfield-model","tabular-learn","generalized-sparse-modern-hopfield","sparse-hopfield"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,0,1,0,0,0,0,0,0,0,1,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-27T19:19:07.761Z"}},{"type":"Public","name":"DNABERT_S","owner":"MAGICS-LAB","isFork":false,"description":"DNABERT_S: Learning Species-Aware DNA Embedding with Genome Foundation Models","allTopics":["dna","embedding","dna-embedding"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":29,"forksCount":8,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,1,2,0,0,0,0,0,0,0,0,0,1,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-13T20:30:13.198Z"}},{"type":"Public","name":"SparseModernHopfield","owner":"MAGICS-LAB","isFork":false,"description":"[NeurIPS 2023] On Sparse Modern Hopfield Model","allTopics":["transformer","attention","hopfield","modern-hopfield-networks","modern-hopfield-model"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":48,"forksCount":11,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,9,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-26T05:58:24.132Z"}},{"type":"Public","name":"STanHop","owner":"MAGICS-LAB","isFork":false,"description":"[ICLR 2024] STanHop: Sparse Tandem Hopfield Model for Memory-Enhanced Time Series Prediction","allTopics":["transformer","attention-mechanism","hopfield","time-series-prediction","time-series-forecasting","modern-hopfield-networks","modern-hopfield-model"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-18T23:22:42.029Z"}},{"type":"Public","name":"NonparametricHopfield","owner":"MAGICS-LAB","isFork":false,"description":"Nonparametric Modern Hopfield Models","allTopics":["efficient-transformers","efficient-attention","modern-hopfield-networks","modern-hopfield-model","efficient-hopfield-models","efficient-hopfield-networks"],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-08T20:53:10.159Z"}},{"type":"Public","name":"DNABERT","owner":"MAGICS-LAB","isFork":true,"description":"[Bioinformatics] DNABERT: pre-trained Bidirectional Encoder Representations from Transformers model for DNA-language in genome","allTopics":["dna"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":151,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-26T20:04:37.361Z"}}],"repositoryCount":9,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}