From c539ec40d20fbd45a38684eabd829b7cd8ad0b9a Mon Sep 17 00:00:00 2001
From: LoserCheems
Date: Tue, 11 Nov 2025 15:36:10 +0800
Subject: [PATCH 1/5] Update BASE_WHEEL_URL to point to the correct GitHub repository

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 6c2a8b0..fc720a5 100644
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@
 PACKAGE_NAME = "flash_sparse_attn"
 
 BASE_WHEEL_URL = (
-    "https://github.com/SmallDoges/flash-sparse-attention/releases/download/{tag_name}/{wheel_name}"
+    "https://github.com/flash-algo/flash-sparse-attention/releases/download/{tag_name}/{wheel_name}"
 )
 
 # FORCE_BUILD: Force a fresh build locally, instead of attempting to find prebuilt wheels

From 8dff08c5dfb6ef20f74418928e0311e0c2537a6d Mon Sep 17 00:00:00 2001
From: LoserCheems
Date: Tue, 11 Nov 2025 15:36:27 +0800
Subject: [PATCH 2/5] Update GitHub Issues links in SECURITY.md to point to the correct repository

---
 SECURITY.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/SECURITY.md b/SECURITY.md
index 1abb585..9132c03 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -50,7 +50,7 @@ If you discover a security vulnerability, please report it responsibly:
 - Include: Detailed description, reproduction steps, and potential impact
 
 **For general bugs:**
-- Use our [GitHub Issues](https://github.com/SmallDoges/flash-sparse-attention/issues)
+- Use our [GitHub Issues](https://github.com/flash-algo/flash-sparse-attention/issues)
 - Follow our [contributing guidelines](CONTRIBUTING.md)
 
 ## Response Timeline
@@ -108,5 +108,5 @@ For security-related questions or concerns:
 - Project maintainers: See [AUTHORS](AUTHORS) file
 
 For general support:
-- GitHub Issues: https://github.com/SmallDoges/flash-sparse-attention/issues
-- Documentation: https://github.com/SmallDoges/flash-sparse-attention/tree/main/docs/
\ No newline at end of file
+- GitHub Issues: https://github.com/flash-algo/flash-sparse-attention/issues
+- Documentation: https://github.com/flash-algo/flash-sparse-attention/tree/main/docs/
\ No newline at end of file

From c626cfd22280040bd492ea710dc98ff12d50ae38 Mon Sep 17 00:00:00 2001
From: LoserCheems
Date: Tue, 11 Nov 2025 15:36:38 +0800
Subject: [PATCH 3/5] Update README.md to correct repository links and image alt text

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 03a817e..ae52c6b 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
 <div align="center">
@@ -67,7 +67,7 @@ pip install flash-sparse-attn --no-build-isolation
 
 Alternatively, you can compile and install from source:
 
 ```bash
-git clone https://github.com/SmallDoges/flash-sparse-attn.git
+git clone https://github.com/flash-algo/flash-sparse-attn.git
 cd flash-sparse-attn
 pip install . --no-build-isolation
 ```
@@ -293,8 +293,8 @@ We welcome contributions from the community! FSA is an open-source project and w
 
 ### How to Contribute
 
-- **Report bugs**: Found a bug? Please [open an issue](https://github.com/SmallDoges/flash_sparse_attn/issues/new/choose)
-- **Request features**: Have an idea for improvement? [Let us know](https://github.com/SmallDoges/flash_sparse_attn/issues/new/choose)
+- **Report bugs**: Found a bug? Please [open an issue](https://github.com/flash-algo/flash_sparse_attn/issues/new/choose)
+- **Request features**: Have an idea for improvement? [Let us know](https://github.com/flash-algo/flash_sparse_attn/issues/new/choose)
 - **Submit code**: Ready to contribute code? Check our [Contributing Guide](CONTRIBUTING.md)
 - **Improve docs**: Help us make the documentation better
 

From 9b412559632817f69ecd124bb8ee1d468ad1ef2f Mon Sep 17 00:00:00 2001
From: LoserCheems
Date: Tue, 11 Nov 2025 15:36:53 +0800
Subject: [PATCH 4/5] Update README_zh.md and pyproject.toml to correct repository links and image alt text

---
 README_zh.md   | 8 ++++----
 pyproject.toml | 6 +++---
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/README_zh.md b/README_zh.md
index 8bea29c..fdbaf73 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -1,5 +1,5 @@
 <div align="center">
@@ -67,7 +67,7 @@ pip install flash-sparse-attn --no-build-isolation
 
 或者, 您可以从源代码编译和安装:
 
 ```bash
-git clone https://github.com/SmallDoges/flash-sparse-attn.git
+git clone https://github.com/flash-algo/flash-sparse-attn.git
 cd flash-sparse-attn
 pip install . --no-build-isolation
 ```
@@ -292,8 +292,8 @@ python benchmarks/grad_equivalence.py
 
 ### 如何贡献
 
-- **报告错误**: 发现了错误?请[提交 issue](https://github.com/SmallDoges/flash_sparse_attn/issues/new/choose)
-- **功能请求**: 有改进想法?[告诉我们](https://github.com/SmallDoges/flash_sparse_attn/issues/new/choose)
+- **报告错误**: 发现了错误?请[提交 issue](https://github.com/flash-algo/flash_sparse_attn/issues/new/choose)
+- **功能请求**: 有改进想法?[告诉我们](https://github.com/flash-algo/flash_sparse_attn/issues/new/choose)
 - **提交代码**: 准备贡献代码?查看我们的[贡献指南](CONTRIBUTING.md)
 - **改进文档**: 帮助我们完善文档
 
diff --git a/pyproject.toml b/pyproject.toml
index 1158eec..1626c13 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,9 +40,9 @@ classifiers = [
 ]
 
 [project.urls]
-Homepage = "https://github.com/SmallDoges/flash-sparse-attention"
-Source = "https://github.com/SmallDoges/flash-sparse-attention"
-Issues = "https://github.com/SmallDoges/flash-sparse-attention/issues"
+Homepage = "https://github.com/flash-algo/flash-sparse-attention"
+Source = "https://github.com/flash-algo/flash-sparse-attention"
+Issues = "https://github.com/flash-algo/flash-sparse-attention/issues"
 
 [project.optional-dependencies]
 triton = [

From 5aa140bbc513e27ab0cb1d8582d2bc16feb1d8db Mon Sep 17 00:00:00 2001
From: LoserCheems
Date: Tue, 11 Nov 2025 15:37:11 +0800
Subject: [PATCH 5/5] Update repository links in CITATION.cff and CONTRIBUTING.md to point to the correct GitHub repository

---
 CITATION.cff    |  2 +-
 CONTRIBUTING.md | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/CITATION.cff b/CITATION.cff
index 4aaeee9..f49a1f7 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -2,7 +2,7 @@ cff-version: "1.2.0"
 date-released: 2025-06
 message: "If you use this software, please cite it using these metadata."
 title: "Flash Sparse Attention: Trainable Dynamic Mask Sparse Attention"
-url: "https://github.com/SmallDoges/flash-sparse-attention"
+url: "https://github.com/flash-algo/flash-sparse-attention"
 authors:
 - family-names: Shi
   given-names: Jingze
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ba79358..8ea1c01 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,7 +4,7 @@ Everyone is welcome to contribute, and we value everybody's contribution. Code c
 
 It also helps us if you spread the word! Reference the library in blog posts about the awesome projects it made possible, shout out on Twitter every time it has helped you, or simply ⭐️ the repository to say thank you.
 
-However you choose to contribute, please be mindful and respect our [code of conduct](https://github.com/SmallDoges/flash-sparse-attention/blob/main/CODE_OF_CONDUCT.md).
+However you choose to contribute, please be mindful and respect our [code of conduct](https://github.com/flash-algo/flash-sparse-attention/blob/main/CODE_OF_CONDUCT.md).
 
 ## Ways to contribute
 
@@ -16,7 +16,7 @@ There are several ways you can contribute to Flash-DMA:
 * Contribute to the examples, benchmarks, or documentation.
 * Improve CUDA kernel performance.
 
-If you don't know where to start, there is a special [Good First Issue](https://github.com/SmallDoges/flash-sparse-attention/contribute) listing. It will give you a list of open issues that are beginner-friendly and help you start contributing to open-source.
+If you don't know where to start, there is a special [Good First Issue](https://github.com/flash-algo/flash-sparse-attention/contribute) listing. It will give you a list of open issues that are beginner-friendly and help you start contributing to open-source.
 
 > All contributions are equally valuable to the community. 🥰
 
@@ -81,14 +81,14 @@ You will need basic `git` proficiency to contribute to Flash-DMA. You'll need **
 
 ### Development Setup
 
-1. Fork the [repository](https://github.com/SmallDoges/flash-sparse-attention) by clicking on the **Fork** button.
+1. Fork the [repository](https://github.com/flash-algo/flash-sparse-attention) by clicking on the **Fork** button.
 
 2. Clone your fork to your local disk, and add the base repository as a remote:
 
    ```bash
    git clone https://github.com/<your-github-username>/flash-sparse-attention.git
    cd flash-sparse-attention
-   git remote add upstream https://github.com/SmallDoges/flash-sparse-attention.git
+   git remote add upstream https://github.com/flash-algo/flash-sparse-attention.git
    ```
 
 3. Create a new branch to hold your development changes:
 
@@ -157,7 +157,7 @@
 
 ### Tests
 
-An extensive test suite is included to test the library behavior and performance. Tests can be found in the [tests](https://github.com/SmallDoges/flash-sparse-attention/tree/main/tests) folder and benchmarks in the [benchmarks](https://github.com/SmallDoges/flash-sparse-attention/tree/main/benchmarks) folder.
+An extensive test suite is included to test the library behavior and performance. Tests can be found in the [tests](https://github.com/flash-algo/flash-sparse-attention/tree/main/tests) folder and benchmarks in the [benchmarks](https://github.com/flash-algo/flash-sparse-attention/tree/main/benchmarks) folder.
 
 We use `pytest` for testing. From the root of the repository, run:
 
@@ -200,6 +200,6 @@ If you discover a security vulnerability, please send an e-mail to the maintaine
 
 ## Questions?
 
-If you have questions about contributing, feel free to ask in the [GitHub Discussions](https://github.com/SmallDoges/flash-sparse-attention/discussions) or open an issue.
+If you have questions about contributing, feel free to ask in the [GitHub Discussions](https://github.com/flash-algo/flash-sparse-attention/discussions) or open an issue.
 
 Thank you for contributing to Flash Sparse Attention! 🚀
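For reviewers who want to see how a `BASE_WHEEL_URL` template like the one updated in PATCH 1/5 is typically consumed, here is a minimal sketch of the usual pattern: `setup.py` formats `{tag_name}` and `{wheel_name}` into a concrete release-asset URL and tries that prebuilt wheel before falling back to a source build. The helper name `get_wheel_url`, the wheel naming scheme, and the example versions below are assumptions for illustration; they are not taken from this repository's `setup.py`.

```python
# Illustrative sketch only: the helper name, wheel naming scheme, and version
# numbers are assumptions for review purposes, not this repo's actual setup.py.
import sys

BASE_WHEEL_URL = (
    "https://github.com/flash-algo/flash-sparse-attention/releases/download/{tag_name}/{wheel_name}"
)


def get_wheel_url(package_version, torch_version, cuda_version):
    # Projects that ship prebuilt CUDA wheels usually encode the CUDA, torch,
    # Python, and platform tags in the wheel file name so setup.py can pick
    # the matching release asset for the current environment.
    python_tag = f"cp{sys.version_info.major}{sys.version_info.minor}"
    wheel_name = (
        f"flash_sparse_attn-{package_version}+cu{cuda_version}torch{torch_version}"
        f"-{python_tag}-{python_tag}-linux_x86_64.whl"
    )
    tag_name = f"v{package_version}"
    return BASE_WHEEL_URL.format(tag_name=tag_name, wheel_name=wheel_name), wheel_name


if __name__ == "__main__":
    url, wheel = get_wheel_url("1.0.0", "2.5", "124")
    # A real setup.py would try to download this wheel and fall back to a
    # source build if the request fails (or if FORCE_BUILD is set).
    print(wheel)
    print(url)
```

Since only the repository path inside the template changes in this series, any such wheel-lookup logic keeps working as long as release assets are published under the `flash-algo` organization with the same tag and wheel names as before.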