{"payload":{"header_redesign_enabled":false,"results":[],"type":"repositories","page":1,"page_count":0,"elapsed_millis":35,"errors":[],"result_count":0,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Ajimmieliu%252Fflash-attention-with-bias-gradient%2B%2Blanguage%253ACuda","metadata":null,"csrf_tokens":{"/sponsors/batch_deferred_sponsor_buttons":{"post":"7ij-sPXa4BAitzJeNcVP1ztGMjJFAItfkXO6z_hvUnA1i86DL78rHddb5aVSTutdMcmy5EvgGNykSeKw6AN2vQ"}}},"title":"Repository search results"}