Commit 65bd7faf authored by Zhiping Jiang (蒋志平)'s avatar Zhiping Jiang (蒋志平) 💬

make OpencvExtensions compatible to Opencv 2.4; move KalmanDataTracker to OpencvExtensions

parent bbfa80bd
...@@ -19,4 +19,7 @@ set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH}) ...@@ -19,4 +19,7 @@ set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
find_package(OpenCV REQUIRED) find_package(OpenCV REQUIRED)
add_subdirectory(OpenCVExtensions) add_subdirectory(OpenCVExtensions)
add_subdirectory(OPENCV_ARUCO)
\ No newline at end of file if (${OpenCV_VERSION} VERSION_GREATER 3.0)
add_subdirectory(OpenCV_ARUCO)
endif()
\ No newline at end of file
...@@ -15,13 +15,13 @@ std::tuple<cv::Vec3d, cv::Vec3d, cv::Matx33d, cv::Matx44d> PoseSolverWrapper::so ...@@ -15,13 +15,13 @@ std::tuple<cv::Vec3d, cv::Vec3d, cv::Matx33d, cv::Matx44d> PoseSolverWrapper::so
cv::Matx33d pose; cv::Matx33d pose;
cv::Matx44d transform; cv::Matx44d transform;
if (solveMethod == PnPSolverMethodIterative) { if (solveMethod == PnPSolverMethodIterative) {
cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, cv::SOLVEPNP_ITERATIVE); cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, 0); // cv::SOLVEPNP_ITERATIVE
} else if(solveMethod == PnPSolverMethodEPnP) { } else if(solveMethod == PnPSolverMethodEPnP) {
cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, cv::SOLVEPNP_EPNP); cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, 1); // cv::SOLVEPNP_EPNP
} else if(solveMethod == PnPSolverMethodDLS) { } else if(solveMethod == PnPSolverMethodDLS) {
cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, cv::SOLVEPNP_DLS); cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, 3); // cv::SOLVEPNP_DLS
} else if(solveMethod == PnPSolverMethodUPnP) { } else if(solveMethod == PnPSolverMethodUPnP) {
cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, cv::SOLVEPNP_UPNP); cv::solvePnP(modelPoints, imagePoints, cameraMatrix, distortionEfficient, rvec, tvec, false, 4); // cv::SOLVEPNP_UPNP
} else if(solveMethod == PnPSolverMethodRPP) { } else if(solveMethod == PnPSolverMethodRPP) {
cv::Mat modelPointsMat = cv::Mat(modelPoints).reshape(1).t(); cv::Mat modelPointsMat = cv::Mat(modelPoints).reshape(1).t();
cv::Mat imagePointsMat = cv::Mat(imagePoints).reshape(1).t(); cv::Mat imagePointsMat = cv::Mat(imagePoints).reshape(1).t();
......
...@@ -112,7 +112,7 @@ cv::Matx33d ScreenCameraInfo::cameraMatrix() { ...@@ -112,7 +112,7 @@ cv::Matx33d ScreenCameraInfo::cameraMatrix() {
cv::Matx33d ScreenCameraInfo::cameraMatrixForFrameSize(cv::Size2i frameSize) { cv::Matx33d ScreenCameraInfo::cameraMatrixForFrameSize(cv::Size2i frameSize) {
cv::Point2d frameCenter = cv::Point2d(frameSize.width / 2.0, frameSize.height / 2.0); cv::Point2d frameCenter = cv::Point2d(frameSize.width / 2.0, frameSize.height / 2.0);
cv::Point2d deviationInCurrentSize = cv::Point2d(frameSize.width > frameSize.height ? focalCenterDeviation : cv::Point2d(focalCenterDeviation.y, focalCenterDeviation.x))*MAX(frameSize.width, frameSize.height) / MAX(frontCameraResolution.width, frontCameraResolution.height); cv::Point2d deviationInCurrentSize = cv::Point2d(frameSize.width > frameSize.height ? focalCenterDeviation : cv::Point2d(focalCenterDeviation.y, focalCenterDeviation.x))*(MAX(frameSize.width, frameSize.height) / MAX(frontCameraResolution.width, frontCameraResolution.height));
cv::Point2d focalPoint = frameCenter; cv::Point2d focalPoint = frameCenter;
switch (deviceOrientation) { switch (deviceOrientation) {
case UIDeviceOrientationPortrait: case UIDeviceOrientationPortrait:
......
...@@ -9,10 +9,10 @@ ...@@ -9,10 +9,10 @@
#include "jzplib_core.h" #include "jzplib_core.h"
namespace cve { namespace cve {
// Crops `roi` from `sourceImage`, clipping the ROI to the image bounds first.
// Uses cv::Rect (not cv::Rect2i) so the code also builds against OpenCV 2.4.
// If the ROI lies entirely outside the image, the intersection is empty and
// an empty Mat is returned. The crop is deep-copied via clone() so the result
// does not alias the source image's pixel buffer.
cv::Mat cropROIWithBoundaryDetection(const cv::Mat & sourceImage, const cv::Rect & roi) {
    const cv::Rect imageBounds(0, 0, sourceImage.cols, sourceImage.rows);
    // Rect::operator& yields the (possibly empty) overlap of the two rectangles,
    // which is guaranteed to be a valid ROI into sourceImage.
    const cv::Rect clipped = roi & imageBounds;
    return sourceImage(clipped).clone();
}
......
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
namespace cve { namespace cve {
cv::Mat cropRotatedRectFromImage(const cv::RotatedRect & rotatedRect, const cv::Mat & image) { cv::Mat cropRotatedRectFromImage(const cv::RotatedRect & rotatedRect, const cv::Mat & image) {
cv::Rect2f boundingRect = rotatedRect.boundingRect(); cv::Rect_<float> boundingRect = rotatedRect.boundingRect();
boundingRect.x = boundingRect.x >= 0 ? boundingRect.x : 0; boundingRect.x = boundingRect.x >= 0 ? boundingRect.x : 0;
boundingRect.y = boundingRect.y >= 0 ? boundingRect.y : 0; boundingRect.y = boundingRect.y >= 0 ? boundingRect.y : 0;
......
...@@ -16,13 +16,13 @@ namespace cve { ...@@ -16,13 +16,13 @@ namespace cve {
temp.copyTo(imageToBeMasked); temp.copyTo(imageToBeMasked);
} }
// Matlab-style imresize: scales `original` uniformly by `scale` in both axes.
// `interpolation` is a plain int (cv::INTER_* value) rather than
// cv::InterpolationFlags so the signature compiles under OpenCV 2.4,
// which predates that enum type.
cv::Mat imresize(const cv::Mat & original, float scale, int interpolation) {
    cv::Mat resized;
    // Empty dsize makes cv::resize derive the output size from the scale factors.
    cv::resize(original, resized, cv::Size(), scale, scale, interpolation);
    return resized;
}
cv::Mat3b imagesc(const cv::Mat & original, cv::ColormapTypes colorMap) { cv::Mat3b imagesc(const cv::Mat & original, int colorMap) {
cv::Mat1b grayColorImage; cv::Mat1b grayColorImage;
cv::Mat3b falseColorsImage; cv::Mat3b falseColorsImage;
cv::normalize(original, grayColorImage, 255.0, 0.0, cv::NORM_MINMAX, CV_8U); cv::normalize(original, grayColorImage, 255.0, 0.0, cv::NORM_MINMAX, CV_8U);
...@@ -37,8 +37,8 @@ namespace cve { ...@@ -37,8 +37,8 @@ namespace cve {
int h = gray_img.rows; int h = gray_img.rows;
cv::Mat wholeFace; cv::Mat wholeFace;
equalizeHist(gray_img, wholeFace); int midX = w/2; equalizeHist(gray_img, wholeFace); int midX = w/2;
cv::Mat leftSide = gray_img(cv::Rect2i(0,0, midX,h)); cv::Mat leftSide = gray_img(cv::Rect(0,0, midX,h));
cv::Mat rightSide = gray_img(cv::Rect2i(midX,0, w-midX,h)); cv::Mat rightSide = gray_img(cv::Rect(midX,0, w-midX,h));
equalizeHist(leftSide, leftSide); equalizeHist(leftSide, leftSide);
equalizeHist(rightSide, rightSide); equalizeHist(rightSide, rightSide);
......
...@@ -29,7 +29,7 @@ namespace cve { ...@@ -29,7 +29,7 @@ namespace cve {
@param interpolation resize interpolation protocol, default is cv::INTER_LINEAR @param interpolation resize interpolation protocol, default is cv::INTER_LINEAR
@return resized image @return resized image
*/ */
cv::Mat imresize(const cv::Mat & original, float scale = 1.0f, cv::InterpolationFlags interpolation = cv::INTER_LINEAR); cv::Mat imresize(const cv::Mat & original, float scale = 1.0f, int interpolation = cv::INTER_LINEAR);
/** /**
A mimic of Matlab imagesc function. A mimic of Matlab imagesc function.
...@@ -38,7 +38,7 @@ namespace cve { ...@@ -38,7 +38,7 @@ namespace cve {
@param colorMap the Colormap for rendering @param colorMap the Colormap for rendering
@return A color image of type Mat3b @return A color image of type Mat3b
*/ */
cv::Mat3b imagesc(const cv::Mat & original, cv::ColormapTypes colorMap = cv::COLORMAP_JET); cv::Mat3b imagesc(const cv::Mat & original, int colorMap = cv::COLORMAP_JET);
cv::Mat luminanceBalancedEqualHist(const cv::Mat & original); cv::Mat luminanceBalancedEqualHist(const cv::Mat & original);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment