{"payload":{"header_redesign_enabled":false,"results":[{"id":"598452366","archived":false,"color":"#3A4E3A","followers":139,"has_funding_file":false,"hl_name":"jundaf2/INT8-Flash-Attention-FMHA-Quantization","hl_trunc_description":null,"language":"Cuda","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":598452366,"name":"INT8-Flash-Attention-FMHA-Quantization","owner_id":43091043,"owner_login":"jundaf2","updated_at":"2023-09-15T05:51:13.755Z","has_issues":true}},"sponsorable":false,"topics":[],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":69,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Ajundaf2%252FINT8-Flash-Attention-FMHA-Quantization%2B%2Blanguage%253ACuda","metadata":null,"csrf_tokens":{"/jundaf2/INT8-Flash-Attention-FMHA-Quantization/star":{"post":"m5FSzTtj4nEoRItj2bv_jSVCHjNY6V3INwqpYtI3gkeDfP9M0GB6A2hIF4zi-5aWxFoTlb-gpMqnlj4DTb7Lyg"},"/jundaf2/INT8-Flash-Attention-FMHA-Quantization/unstar":{"post":"WGhfVxFsNJ19h5XCKf7ykNbFiNeUJXFIx27OrLzcnNoFsaCHtcW9DsIJHZ91YZRHe0bUV7noQF_MRGVufsW2Cw"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"yhFD_GbKtu663-hITIbYv6DDb1JLyMpzTllFAWzQ_p9OiPNG7wlAtpmDt4bDMWeCaQdTUnHbwFdY_XgNp3xeNQ"}}},"title":"Repository search results"}