{"payload":{"header_redesign_enabled":false,"results":[{"id":"494232964","archived":false,"color":"#3572A5","followers":11739,"has_funding_file":false,"hl_name":"Dao-AILab/flash-attention","hl_trunc_description":"Fast and memory-efficient exact attention","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":494232964,"name":"flash-attention","owner_id":139507659,"owner_login":"Dao-AILab","updated_at":"2024-06-27T09:38:13.862Z","has_issues":true}},"sponsorable":false,"topics":[],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":73,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253ADao-AILab%252Fflash-attention%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/Dao-AILab/flash-attention/star":{"post":"TvRhgmS6uJTXnd2Tm8qfLWsSS-p7zKF-EOkuW9qYl0DlqGjMZVJx0BR9t4H50xJS5vYfzqT1QT1HuGDt4jSLKw"},"/Dao-AILab/flash-attention/unstar":{"post":"cKiFkJnOYSKvTpKdZJ1cWnH4tRUw9OTOCFDqnO4F2y_KanJKcRpUCNhwFPZEhOUhUAkhN1QLxDjWpH6FBFsCdA"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"3BbLI-BJe0w0ynuCKXCZx3DBmtP5GdG1ZfNeskSfQV8nOLBHJlUC3IkzrTH1ZR1ShYzaeAaw7B1TpMC878TIug"}}},"title":"Repository search results"}