diff --git a/src/viam/examples/modules/tflite/main.cpp b/src/viam/examples/modules/tflite/main.cpp
index 9d969b4ae..d7adacb48 100644
--- a/src/viam/examples/modules/tflite/main.cpp
+++ b/src/viam/examples/modules/tflite/main.cpp
@@ -82,7 +82,7 @@ class MLModelServiceTFLite : public vsdk::MLModelService, public vsdk::Stoppable
     void stop() noexcept {
         using std::swap;
         try {
-            std::lock_guard lock(state_lock_);
+            const std::lock_guard lock(state_lock_);
             if (!stopped_) {
                 stopped_ = true;
                 std::shared_ptr state;
@@ -93,8 +93,8 @@
         }
     }
 
-    void reconfigure(vsdk::Dependencies dependencies, vsdk::ResourceConfig configuration) final
-    try {
+    void reconfigure(const vsdk::Dependencies& dependencies,
+                     const vsdk::ResourceConfig& configuration) final try {
         // Care needs to be taken during reconfiguration. The
         // framework does not offer protection against invocation
         // during reconfiguration. Keep all state in a shared_ptr
@@ -121,7 +121,7 @@ class MLModelServiceTFLite : public vsdk::MLModelService, public vsdk::Stoppable
             swap(state_, state);
         }
 
-        state = reconfigure_(std::move(dependencies), std::move(configuration));
+        state = reconfigure_(dependencies, configuration);
 
         // Reconfiguration worked: put the state in under the lock,
        // release the lock, and then notify any callers waiting on
@@ -140,7 +140,7 @@ class MLModelServiceTFLite : public vsdk::MLModelService, public vsdk::Stoppable
 
     std::shared_ptr<named_tensor_views> infer(const named_tensor_views& inputs,
                                               const vsdk::AttributeMap& extra) final {
-        const auto state = lease_state_();
+        auto state = lease_state_();
 
         // We serialize access to the interpreter. We use a
         // unique_lock because we will move the lock into the shared
@@ -244,6 +244,7 @@ class MLModelServiceTFLite : public vsdk::MLModelService, public vsdk::Stoppable
         // inference_result object is destroyed, the lock will be
         // released and the next caller can invoke the interpreter.
         auto* const views = &inference_result->views;
+        // NOLINTNEXTLINE(performance-move-const-arg): C++20
         return {std::move(inference_result), views};
     }
 
@@ -359,7 +360,7 @@ class MLModelServiceTFLite : public vsdk::MLModelService, public vsdk::Stoppable
         // buffer which we can use with `TfLiteModelCreate`. That
         // still requires that the buffer be kept valid, but that's
         // more easily done.
-        std::ifstream in(*model_path_string, std::ios::in | std::ios::binary);
+        const std::ifstream in(*model_path_string, std::ios::in | std::ios::binary);
         if (!in) {
             std::ostringstream buffer;
             buffer << service_name << ": Failed to open file for `model_path` "
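
The `stop`/`reconfigure` hunks above lean on the pattern the in-diff comments describe: all mutable service state lives behind a `shared_ptr` that is swapped under `state_lock_`, and each call leases its own reference so a concurrent reconfiguration cannot pull the state out from under an in-flight request. Below is a minimal sketch of that leasing idiom, independent of the Viam SDK and TFLite; the `Service`/`State` classes and `current_version` method are illustrative stand-ins, not the module's actual types, though the member names (`state_`, `state_lock_`, `state_ready_`, `stopped_`, `lease_state_`) mirror the ones visible in the diff.

```cpp
#include <condition_variable>
#include <iostream>
#include <memory>
#include <mutex>
#include <stdexcept>
#include <utility>

// Hypothetical stand-in for whatever the module rebuilds on each (re)configuration.
struct State {
    int model_version = 0;
};

class Service {
  public:
    // Build the replacement state outside the lock, then swap it in and wake
    // any callers blocked in lease_state_().
    void reconfigure(int new_version) {
        auto fresh = std::make_shared<State>();
        fresh->model_version = new_version;

        const std::lock_guard<std::mutex> lock(state_lock_);
        using std::swap;
        swap(state_, fresh);
        state_ready_.notify_all();
    }

    void stop() noexcept {
        const std::lock_guard<std::mutex> lock(state_lock_);
        stopped_ = true;
        state_.reset();
        state_ready_.notify_all();
    }

    int current_version() {
        // Each call leases its own shared_ptr, so reconfigure() or stop() can
        // drop `state_` without invalidating requests already in flight.
        const auto state = lease_state_();
        return state->model_version;
    }

  private:
    std::shared_ptr<State> lease_state_() {
        std::unique_lock<std::mutex> lock(state_lock_);
        state_ready_.wait(lock, [this] { return stopped_ || state_ != nullptr; });
        if (stopped_ || !state_) {
            throw std::runtime_error("service is stopped or not configured");
        }
        return state_;
    }

    std::mutex state_lock_;
    std::condition_variable state_ready_;
    bool stopped_ = false;
    std::shared_ptr<State> state_;
};

int main() {
    Service service;
    service.reconfigure(1);                           // install initial state
    std::cout << service.current_version() << "\n";   // leases the state for this call
    service.stop();                                   // later calls would now throw
    return 0;
}
```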
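
The `infer` hunks, including the added `NOLINTNEXTLINE(performance-move-const-arg)`, rely on an idiom the in-diff comments only gesture at: the interpreter lock is moved into the object that owns the inference result, and the caller receives a `shared_ptr` built with the aliasing constructor, so the lock is released only when the caller drops the result. Here is a minimal sketch of that idiom under simplified assumptions; `Interpreter`, `Result`, and `run_inference` are illustrative names, not the module's real types.

```cpp
#include <iostream>
#include <memory>
#include <mutex>
#include <string>
#include <utility>

// Stand-in for the serialized resource (e.g. a TFLite interpreter).
struct Interpreter {
    std::mutex mutex;  // guards invoke()
    std::string invoke() { return "tensors"; }
};

// Owns both the lock and the storage backing the views handed to the caller.
struct Result {
    std::unique_lock<std::mutex> lock;  // held until Result is destroyed
    std::string views;                  // what the caller actually reads
};

// Returns a pointer to `views` whose lifetime, and therefore the lock, is
// tied to the Result object.
std::shared_ptr<std::string> run_inference(Interpreter& interp) {
    auto result = std::make_shared<Result>();
    result->lock = std::unique_lock<std::mutex>(interp.mutex);
    result->views = interp.invoke();

    // Aliasing constructor: the returned shared_ptr shares ownership of
    // `result` but points at `result->views`. When the caller releases it,
    // Result is destroyed and the unique_lock frees the interpreter for the
    // next caller.
    auto* const views = &result->views;
    return {std::move(result), views};
}

int main() {
    Interpreter interp;
    const auto views = run_inference(interp);  // interpreter stays locked...
    std::cout << *views << "\n";
    return 0;                                  // ...until `views` goes out of scope
}
```

Before C++20 the aliasing constructor takes the source `shared_ptr` only by const reference, so the `std::move` in that return statement is effectively a copy; the `NOLINTNEXTLINE(performance-move-const-arg): C++20` annotation added in the diff appears to acknowledge exactly that, since C++20 adds an rvalue overload that makes the move meaningful.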