{"payload":{"header_redesign_enabled":false,"results":[{"id":"660634002","archived":false,"color":"#384d54","followers":11,"has_funding_file":false,"hl_name":"developer0hye/onnxruntime-cuda-cpp-example","hl_trunc_description":"Examples for inference models with ONNXRuntime and CUDA","language":"Dockerfile","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":660634002,"name":"onnxruntime-cuda-cpp-example","owner_id":35001605,"owner_login":"developer0hye","updated_at":"2023-07-28T06:09:07.137Z","has_issues":true}},"sponsorable":false,"topics":["cpp","gpu","example","cuda","inference","tensorrt","onnx","onnxruntime"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":63,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Adeveloper0hye%252Fonnxruntime-cuda-cpp-example%2B%2Blanguage%253ADockerfile","metadata":null,"csrf_tokens":{"/developer0hye/onnxruntime-cuda-cpp-example/star":{"post":"DLJ5PBYfSIwxPxaMaIQIYB-clUvXux3BWr2psg6NqaAJo-igbgC48AkYxGRbDmP8FSwHdVMEWuyJsTk-o0BaWA"},"/developer0hye/onnxruntime-cuda-cpp-example/unstar":{"post":"0vRxqYIOlRYHB5JWYysh6Gbuyd0zShlxyHL4WHglCnDSdDK_eX05vxJdz1LimwMk5EQy5f7fsVdTgFdqC8VzkQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"LwdC5aT9nA5LbHXfvvyNdiqVljlDagZfZBlD4VfHAufpiPTOAYKQVk0nC0f8bcdDNub0WR3XbCqUgb1MylLV4g"}}},"title":"Repository search results"}