diff --git a/examples/create_mask.rs b/examples/create_mask.rs
index bbbce131..e2f4401b 100644
--- a/examples/create_mask.rs
+++ b/examples/create_mask.rs
@@ -1,21 +1,22 @@
 //! # Create Mask
-//! Reference: [opencv/samples/cpp/create_mask.cpp](https://github.com/opencv/opencv/blob/4.x/samples/cpp/create_mask.cpp)
-
-use opencv::{
-    core::{bitwise_and, find_file, CommandLineParser, Point, Scalar, CV_8UC1, CV_8UC3},
-    highgui::{self, imshow},
-    imgcodecs::{imread, IMREAD_COLOR},
-    imgproc,
-    prelude::*,
-};
-
-use std::{
-    env, process,
-    sync::{
-        atomic::{self, AtomicBool},
-        Arc, Mutex,
-    },
-};
+//! Reference: [opencv/samples/cpp/create_mask.cpp](https://github.com/opencv/opencv/blob/4.9.0/samples/cpp/create_mask.cpp)
+
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc, Mutex};
+use std::{env, process};
+
+use opencv::core::{bitwise_and, find_file, CommandLineParser, Point, Scalar, Vec3b};
+use opencv::highgui::imshow;
+use opencv::imgcodecs::{imread, IMREAD_COLOR};
+use opencv::prelude::*;
+use opencv::{highgui, imgproc, not_opencv_branch_4, opencv_branch_4, Result};
+
+opencv_branch_4! {
+    use opencv::imgproc::LINE_8;
+}
+not_opencv_branch_4! {
+    use opencv::core::LINE_8;
+}
 
 const SOURCE_WINDOW: &str = "Source image";
 
@@ -29,13 +30,13 @@ enum DrawingState {
     Resetting,
 }
 
-fn main() {
+fn main() -> Result<()> {
     let args: Vec<String> = env::args().collect();
     let (argc, argv) = (args.len() as i32, args.iter().map(|s| s.as_str()).collect::<Vec<_>>());
 
-    let mut parser = CommandLineParser::new(argc, &argv, "{@input | lena.jpg | input image}").unwrap();
-    parser.about("This program demonstrates using mouse events\n").unwrap();
-    parser.print_message().unwrap();
+    let mut parser = CommandLineParser::new(argc, &argv, "{@input | lena.jpg | input image}")?;
+    parser.about("This program demonstrates using mouse events\n")?;
+    parser.print_message()?;
     println!(
         "\n\tleft mouse button - set a point to create mask shape\n\
         \tright mouse button - create mask from points\n\
@@ -52,13 +53,13 @@
         .unwrap_or_else(|_| panic!("Cannot find input_image: {}", input_image));
 
     let [src, mut next_frame, mut mask, mut final_img]: [Mat; 4];
-    src = imread(&input_image_path, IMREAD_COLOR).unwrap();
+    src = imread(&input_image_path, IMREAD_COLOR)?;
     if src.empty() {
         eprintln!("Error opening image: {}", input_image);
         process::exit(-1);
     }
 
-    highgui::named_window(SOURCE_WINDOW, highgui::WINDOW_AUTOSIZE).unwrap();
+    highgui::named_window(SOURCE_WINDOW, highgui::WINDOW_AUTOSIZE)?;
 
     let mouse_event_data = (highgui::MouseEventTypes::EVENT_MOUSEWHEEL, 0, 0, 0);
     let (mouse_event_data, should_handle_mouse_event) = (Arc::new(Mutex::new(mouse_event_data)), Arc::new(AtomicBool::new(false)));
@@ -71,28 +72,28 @@
             if let Ok(mut mouse_data) = mouse_data.lock() {
                 *mouse_data = (mouse_event_from_i32(event), x, y, flags);
             }
-            should_handle_mouse_event.store(true, atomic::Ordering::Relaxed);
+            should_handle_mouse_event.store(true, Ordering::Relaxed);
         }
     };
 
     highgui::set_mouse_callback(SOURCE_WINDOW, Some(Box::new(mouse_event_dispatcher))).expect("Cannot set mouse callback");
 
-    highgui::imshow(SOURCE_WINDOW, &src).unwrap();
+    imshow(SOURCE_WINDOW, &src)?;
 
     let (mut marker_points, mut drawing_state) = (Vec::<Point>::new(), DrawingState::Init);
-    next_frame = Mat::zeros_size(src.size().unwrap(), CV_8UC3).unwrap().to_mat().unwrap();
+    next_frame = Mat::zeros_size(src.size()?, Vec3b::opencv_type())?.to_mat()?;
 
     loop {
         // Press Esc to exit
-        if highgui::wait_key(10).unwrap() == 27 {
-            break;
+        if highgui::wait_key(10)? == 27 {
+            break Ok(());
         }
 
         let (mouse_event, x, y, _) = {
-            if !should_handle_mouse_event.load(atomic::Ordering::Relaxed) {
+            if !should_handle_mouse_event.load(Ordering::Relaxed) {
                 continue;
             } else {
-                should_handle_mouse_event.store(false, atomic::Ordering::Relaxed);
+                should_handle_mouse_event.store(false, Ordering::Relaxed);
 
                 if let Ok(mouse_event_data) = mouse_event_data.lock() {
                     *mouse_event_data
@@ -102,7 +103,7 @@
             }
         };
 
-        drawing_state = self::state_transform(drawing_state, mouse_event);
+        drawing_state = state_transform(drawing_state, mouse_event);
 
         match drawing_state {
             DrawingState::Init | DrawingState::DrawingMarkerPointFinished => { /* do nothing */ }
@@ -112,16 +113,7 @@
                 }
 
                 let point = Point::new(x, y);
-                imgproc::circle(
-                    &mut next_frame,
-                    point,
-                    2,
-                    Scalar::new(0., 0., 255., 0.),
-                    -1,
-                    imgproc::LINE_8,
-                    0,
-                )
-                .unwrap();
+                imgproc::circle(&mut next_frame, point, 2, Scalar::new(0., 0., 255., 0.), -1, LINE_8, 0)?;
                 marker_points.push(point);
 
                 if marker_points.len() > 1 {
@@ -131,13 +123,12 @@
                         point,
                         Scalar::new(0., 0., 255., 0.),
                         2,
-                        imgproc::LINE_8,
+                        LINE_8,
                         0,
-                    )
-                    .unwrap();
+                    )?;
                 }
 
-                imshow(SOURCE_WINDOW, &next_frame).unwrap();
+                imshow(SOURCE_WINDOW, &next_frame)?;
             }
             DrawingState::DrawingMask => {
                 if !marker_points.is_empty() {
@@ -145,38 +136,29 @@
                     imgproc::polylines(
                         &mut next_frame,
-                        &Mat::from_slice(marker_points.as_slice()).unwrap(),
+                        &Mat::from_slice(marker_points.as_slice())?,
                         true,
                         Scalar::new(0., 0., 0., 0.),
                         2,
-                        imgproc::LINE_8,
+                        LINE_8,
                         0,
-                    )
-                    .unwrap();
+                    )?;
 
-                    imshow(SOURCE_WINDOW, &next_frame).unwrap();
+                    imshow(SOURCE_WINDOW, &next_frame)?;
                 }
             }
             DrawingState::DrawingMaskFinished => {
                 if !marker_points.is_empty() {
-                    final_img = Mat::zeros_size(src.size().unwrap(), CV_8UC3).unwrap().to_mat().unwrap();
-                    mask = Mat::zeros_size(src.size().unwrap(), CV_8UC1).unwrap().to_mat().unwrap();
-
-                    imgproc::fill_poly(
-                        &mut mask,
-                        &Mat::from_slice(marker_points.as_slice()).unwrap(),
-                        Scalar::new(255., 255., 255., 255.),
-                        imgproc::LINE_8,
-                        0,
-                        Point::default(),
-                    )
-                    .unwrap();
+                    final_img = Mat::zeros_size(src.size()?, Vec3b::opencv_type())?.to_mat()?;
+                    mask = Mat::zeros_size(src.size()?, u8::opencv_type())?.to_mat()?;
+
+                    imgproc::fill_poly_def(&mut mask, &Mat::from_slice(marker_points.as_slice())?, Scalar::all(255.))?;
 
-                    bitwise_and(&src, &src, &mut final_img, &mask).unwrap();
+                    bitwise_and(&src, &src, &mut final_img, &mask)?;
 
-                    imshow("Mask", &mask).unwrap();
-                    imshow("Result", &final_img).unwrap();
-                    imshow(SOURCE_WINDOW, &next_frame).unwrap();
+                    imshow("Mask", &mask)?;
+                    imshow("Result", &final_img)?;
+                    imshow(SOURCE_WINDOW, &next_frame)?;
                 }
             }
             DrawingState::Resetting => {
                 if !marker_points.is_empty() {
@@ -184,7 +166,7 @@
                     marker_points.clear();
                     next_frame = src.clone();
 
-                    imshow(SOURCE_WINDOW, &next_frame).unwrap();
+                    imshow(SOURCE_WINDOW, &next_frame)?;
                 }
             }
         }
@@ -196,9 +178,9 @@
 /// # Panics
 ///
 /// Panics if the argument less than 0 or greater than 11.
-fn mouse_event_from_i32(value: i32) -> opencv::highgui::MouseEventTypes {
-    (value.gt(&(opencv::highgui::MouseEventTypes::EVENT_MOUSEHWHEEL as i32/* 11 */))
-        || (value.lt(&(opencv::highgui::MouseEventTypes::EVENT_MOUSEMOVE as i32/* 0 */))))
+fn mouse_event_from_i32(value: i32) -> highgui::MouseEventTypes {
+    (value.gt(&(highgui::MouseEventTypes::EVENT_MOUSEHWHEEL as i32/* 11 */))
+        || (value.lt(&(highgui::MouseEventTypes::EVENT_MOUSEMOVE as i32/* 0 */))))
     .then(|| panic!("Invalid cv::highgui::MouseEventTypes value: {}", value));
 
     // Safe because of the previous check
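Note on the version-gated import in create_mask.rs above: the `opencv` crate's `opencv_branch_4!` / `not_opencv_branch_4!` macros compile their body only for the matching OpenCV major version detected at build time, which is how `LINE_8` can come from `imgproc` on 4.x and from `core` otherwise. A minimal standalone sketch of the same pattern; the `draw_dot` helper and the canvas size are made up for illustration, the calls themselves mirror the example:

```rust
use opencv::core::{Point, Scalar, Vec3b};
use opencv::prelude::*;
use opencv::{imgproc, not_opencv_branch_4, opencv_branch_4, Result};

// Exactly one of these bodies is compiled, depending on the OpenCV major
// version detected at build time; both put a `LINE_8` constant in scope.
opencv_branch_4! {
    use opencv::imgproc::LINE_8;
}
not_opencv_branch_4! {
    use opencv::core::LINE_8;
}

fn draw_dot() -> Result<Mat> {
    // A black 100x100 BGR canvas, same element type as the example's frames.
    let mut canvas = Mat::new_rows_cols_with_default(100, 100, Vec3b::opencv_type(), Scalar::all(0.))?;
    // `LINE_8` resolves to whichever import the macros kept.
    imgproc::circle(&mut canvas, Point::new(50, 50), 2, Scalar::new(0., 0., 255., 0.), -1, LINE_8, 0)?;
    Ok(canvas)
}

fn main() -> Result<()> {
    println!("canvas size: {:?}", draw_dot()?.size()?);
    Ok(())
}
```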
diff --git a/examples/video_facedetect.rs b/examples/video_facedetect.rs
index b642bed1..81a15f39 100644
--- a/examples/video_facedetect.rs
+++ b/examples/video_facedetect.rs
@@ -1,20 +1,16 @@
 use std::thread;
 use std::time::Duration;
 
+use opencv::core::{Rect, Size, Vector};
 use opencv::prelude::*;
-use opencv::{core, highgui, imgproc, objdetect, types, videoio, Result};
+use opencv::{core, highgui, imgproc, objdetect, videoio, Result};
 
 fn main() -> Result<()> {
-    let window = "video capture";
-    highgui::named_window_def(window)?;
-    let (xml, mut cam) = {
-        (
-            core::find_file_def("haarcascades/haarcascade_frontalface_alt.xml")?,
-            videoio::VideoCapture::new(0, videoio::CAP_ANY)?, // 0 is the default camera
-        )
-    };
-    let opened = videoio::VideoCapture::is_opened(&cam)?;
-    if !opened {
+    const WINDOW: &str = "video capture";
+    highgui::named_window_def(WINDOW)?;
+    let xml = core::find_file_def("haarcascades/haarcascade_frontalface_alt.xml")?;
+    let mut cam = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; // 0 is the default camera
+    if !cam.is_opened()? {
         panic!("Unable to open default camera!");
     }
     let mut face = objdetect::CascadeClassifier::new(&xml)?;
@@ -28,31 +24,24 @@
         let mut gray = Mat::default();
         imgproc::cvt_color_def(&frame, &mut gray, imgproc::COLOR_BGR2GRAY)?;
         let mut reduced = Mat::default();
-        imgproc::resize(
-            &gray,
-            &mut reduced,
-            core::Size { width: 0, height: 0 },
-            0.25f64,
-            0.25f64,
-            imgproc::INTER_LINEAR,
-        )?;
-        let mut faces = types::VectorOfRect::new();
+        imgproc::resize(&gray, &mut reduced, Size::new(0, 0), 0.25, 0.25, imgproc::INTER_LINEAR)?;
+        let mut faces = Vector::new();
         face.detect_multi_scale(
             &reduced,
             &mut faces,
             1.1,
             2,
             objdetect::CASCADE_SCALE_IMAGE,
-            core::Size { width: 30, height: 30 },
-            core::Size { width: 0, height: 0 },
+            Size::new(30, 30),
+            Size::new(0, 0),
         )?;
         println!("faces: {}", faces.len());
         for face in faces {
             println!("face {face:?}");
-            let scaled_face = core::Rect::new(face.x * 4, face.y * 4, face.width * 4, face.height * 4);
+            let scaled_face = Rect::new(face.x * 4, face.y * 4, face.width * 4, face.height * 4);
             imgproc::rectangle_def(&mut frame, scaled_face, (0, 255, 0).into())?;
         }
-        highgui::imshow(window, &frame)?;
+        highgui::imshow(WINDOW, &frame)?;
         if highgui::wait_key(10)? > 0 {
             break;
         }
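On the container switch in video_facedetect.rs above: `types::VectorOfRect` is an alias for the generic `core::Vector<Rect>`, which `detect_multi_scale` fills and which Rust code can build and iterate directly. A small self-contained sketch; the `scale_up` helper and the rectangle values are invented for illustration, and the x4 factor mirrors the quarter-size resize in the example:

```rust
use opencv::core::{Rect, Vector};
use opencv::Result;

// Scale detections back up, as the example does after detecting on a
// quarter-size image. `Vector<Rect>` iterates by value, like `for face in faces` above.
fn scale_up(faces: Vector<Rect>) -> Vector<Rect> {
    let mut scaled = Vector::new();
    for r in faces {
        scaled.push(Rect::new(r.x * 4, r.y * 4, r.width * 4, r.height * 4));
    }
    scaled
}

fn main() -> Result<()> {
    let mut faces = Vector::new();
    faces.push(Rect::new(10, 20, 30, 40)); // a made-up detection
    let scaled = scale_up(faces);
    println!("{} rect(s), first: {:?}", scaled.len(), scaled.get(0)?);
    Ok(())
}
```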
diff --git a/examples/video_features.rs b/examples/video_features.rs
index 5ecf1e07..ae8b6862 100644
--- a/examples/video_features.rs
+++ b/examples/video_features.rs
@@ -1,38 +1,25 @@
+use opencv::core::Vector;
 use opencv::prelude::*;
-use opencv::{core, features2d, highgui, imgproc, videoio, Result};
+use opencv::{features2d, highgui, imgproc, videoio, Result};
 
 fn main() -> Result<()> {
     let window = "video capture";
     highgui::named_window(window, 1)?;
     let mut cam = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; // 0 is the default camera
-    let opened = videoio::VideoCapture::is_opened(&cam)?;
-    if !opened {
+    if !cam.is_opened()? {
         panic!("Unable to open default camera!");
     }
     let mut orb = features2d::ORB::create_def()?;
     loop {
         let mut frame = Mat::default();
         cam.read(&mut frame)?;
-        if frame.size()?.width > 0 {
+        if frame.cols() > 0 {
             let mut gray = Mat::default();
-            imgproc::cvt_color(&frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;
-            let mut kps = opencv::types::VectorOfKeyPoint::new();
-            let mask = Mat::default();
-            orb.detect(&gray, &mut kps, &mask)?;
+            imgproc::cvt_color_def(&frame, &mut gray, imgproc::COLOR_BGR2GRAY)?;
+            let mut kps = Vector::new();
+            orb.detect_def(&gray, &mut kps)?;
             let mut display = Mat::default();
-            opencv::opencv_branch_4! {
-                let default_draw_matches_flags = features2d::DrawMatchesFlags::DEFAULT;
-            }
-            opencv::not_opencv_branch_4! {
-                let default_draw_matches_flags = features2d::DrawMatchesFlags_DEFAULT;
-            }
-            features2d::draw_keypoints(
-                &gray,
-                &kps,
-                &mut display,
-                core::Scalar::all(-1f64),
-                default_draw_matches_flags,
-            )?;
+            features2d::draw_keypoints_def(&gray, &kps, &mut display)?;
             highgui::imshow(window, &display)?;
         }
         if highgui::wait_key(10)? > 0 {
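About the `*_def` calls these examples now use (`cvt_color_def`, `detect_def`, `draw_keypoints_def`, `create_def`): for bindings with trailing default arguments, the crate generates a `_def` variant that fills those defaults in, so the two spellings below should behave identically for the crate version these examples target. A minimal sketch using a dummy frame; the 4x4 size is arbitrary:

```rust
use opencv::core::{Mat, Scalar, CV_8UC3};
use opencv::prelude::*;
use opencv::{imgproc, Result};

fn main() -> Result<()> {
    // A tiny black BGR image stands in for a captured frame.
    let frame = Mat::new_rows_cols_with_default(4, 4, CV_8UC3, Scalar::all(0.))?;

    // Full form: the trailing argument is the destination channel count
    // (0 lets OpenCV derive it from the conversion code).
    let mut gray_a = Mat::default();
    imgproc::cvt_color(&frame, &mut gray_a, imgproc::COLOR_BGR2GRAY, 0)?;

    // `_def` form: the same call with the defaulted trailing argument omitted.
    let mut gray_b = Mat::default();
    imgproc::cvt_color_def(&frame, &mut gray_b, imgproc::COLOR_BGR2GRAY)?;

    assert_eq!(gray_a.size()?, gray_b.size()?);
    Ok(())
}
```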
diff --git a/examples/video_to_gray.rs b/examples/video_to_gray.rs
index 820bb174..9c94ceed 100644
--- a/examples/video_to_gray.rs
+++ b/examples/video_to_gray.rs
@@ -1,11 +1,11 @@
-use opencv::{highgui, imgproc, prelude::*, videoio, Result};
+use opencv::prelude::*;
+use opencv::{highgui, imgproc, videoio, Result};
 
 fn main() -> Result<()> {
     let window = "video capture";
     highgui::named_window(window, 1)?;
     let mut cam = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; // 0 is the default camera
-    let opened = videoio::VideoCapture::is_opened(&cam)?;
-    if !opened {
+    if !cam.is_opened()? {
         panic!("Unable to open default camera!");
     }
     loop {
@@ -13,7 +13,7 @@
         cam.read(&mut frame)?;
         if frame.size()?.width > 0 {
             let mut gray = Mat::default();
-            imgproc::cvt_color(&frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;
+            imgproc::cvt_color_def(&frame, &mut gray, imgproc::COLOR_BGR2GRAY)?;
             highgui::imshow(window, &gray)?;
         }
         if highgui::wait_key(10)? > 0 {
diff --git a/tests/features2d.rs b/tests/features2d.rs
index ecfdf614..7ede9109 100644
--- a/tests/features2d.rs
+++ b/tests/features2d.rs
@@ -27,22 +27,21 @@ fn orb_bruteforce_match() -> Result<()> {
     let mut orb = features2d::ORB::create_def()?;
     let mut kp_a = Vector::new();
     let mut des_a = Mat::default();
-    orb.detect_and_compute(&img_a, &Mat::default(), &mut kp_a, &mut des_a, false)?;
+    orb.detect_and_compute_def(&img_a, &Mat::default(), &mut kp_a, &mut des_a)?;
     let mut kp_b = Vector::new();
     let mut des_b = Mat::default();
-    orb.detect_and_compute(&img_b, &Mat::default(), &mut kp_b, &mut des_b, false)?;
+    orb.detect_and_compute_def(&img_b, &Mat::default(), &mut kp_b, &mut des_b)?;
 
-    let size = 290;
-    assert_eq!(size, kp_a.len());
-    assert_eq!(Size::new(32, size as i32), des_a.size()?);
-    assert_eq!(size, kp_b.len());
-    assert_eq!(Size::new(32, size as i32), des_b.size()?);
+    assert_eq!(kp_a.len(), kp_b.len());
+    assert_eq!(des_a.size()?, des_b.size()?);
+    assert_eq!(290, kp_a.len());
+    assert_eq!(Size::new(32, 290), des_a.size()?);
 
-    let bf_matcher = features2d::BFMatcher::create(NORM_HAMMING, true).unwrap();
+    let bf_matcher = features2d::BFMatcher::create(NORM_HAMMING, true)?;
 
-    let mut matches = opencv::types::VectorOfDMatch::new();
-    bf_matcher.train_match(&des_a, &des_b, &mut matches, &no_array()).unwrap();
+    let mut matches = Vector::new();
+    bf_matcher.train_match(&des_a, &des_b, &mut matches, &no_array())?;
 
     assert_ne!(matches.len(), 0); // expected many matches since images are equal
 
     Ok(())
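The test above relies on the same `Result`-returning style as the examples: because the test function returns `opencv::Result<()>`, every fallible call can use `?` instead of `.unwrap()`, and any propagated error is reported as a test failure. A bare-bones sketch of that pattern; the test name and the trivial assertion are placeholders, and only `BFMatcher::create` is an actual OpenCV call:

```rust
use opencv::core::{DMatch, Vector, NORM_HAMMING};
use opencv::{features2d, Result};

#[test]
fn result_based_test() -> Result<()> {
    // An error propagated by `?` fails the test without an explicit unwrap.
    let _bf_matcher = features2d::BFMatcher::create(NORM_HAMMING, true)?;
    let matches = Vector::<DMatch>::new();
    assert_eq!(matches.len(), 0);
    Ok(())
}
```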