Merge remote-tracking branch 'upstream/3.4' into merge-3.4

Branch: pull/13474/head
Author: Alexander Alekhin (6 years ago)
Commit: 7fb70e1701
20 changed files:

  1. CMakeLists.txt (8 lines changed)
  2. cmake/OpenCVDetectCUDA.cmake (2)
  3. cmake/OpenCVFindLibsVideo.cmake (9)
  4. cmake/OpenCVGenSetupVars.cmake (19)
  5. doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown (14)
  6. modules/core/include/opencv2/core/eigen.hpp (2)
  7. modules/core/include/opencv2/core/hal/intrin_vsx.hpp (2)
  8. modules/core/src/ocl.cpp (8)
  9. modules/core/test/test_mat.cpp (24)
  10. modules/imgproc/src/deriv.cpp (4)
  11. modules/python/CMakeLists.txt (1)
  12. modules/python/common.cmake (15)
  13. modules/python/python_loader.cmake (17)
  14. modules/python/standalone.cmake (8)
  15. modules/stitching/perf/perf_stich.cpp (34)
  16. modules/stitching/src/exposure_compensate.cpp (79)
  17. modules/videoio/CMakeLists.txt (4)
  18. modules/videoio/src/cap_dc1394_v2.cpp (16)
  19. modules/videoio/src/cap_msmf.cpp (20)
  20. samples/dnn/text_detection.py (146)

CMakeLists.txt

@@ -341,9 +341,12 @@ OCV_OPTION(WITH_V4L "Include Video 4 Linux support" ON
 OCV_OPTION(WITH_DSHOW "Build VideoIO with DirectShow support" ON
   VISIBLE_IF WIN32 AND NOT ARM AND NOT WINRT
   VERIFY HAVE_DSHOW)
-OCV_OPTION(WITH_MSMF "Build VideoIO with Media Foundation support" ON
+OCV_OPTION(WITH_MSMF "Build VideoIO with Media Foundation support" NOT MINGW
   VISIBLE_IF WIN32
   VERIFY HAVE_MSMF)
+OCV_OPTION(WITH_MSMF_DXVA "Enable hardware acceleration in Media Foundation backend" WITH_MSMF
+  VISIBLE_IF WIN32
+  VERIFY HAVE_MSMF_DXVA)
 OCV_OPTION(WITH_XIMEA "Include XIMEA cameras support" OFF
   VISIBLE_IF NOT ANDROID AND NOT WINRT
   VERIFY HAVE_XIMEA)
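
(Since the new WITH_MSMF_DXVA option defaults to the value of WITH_MSMF, DXVA hardware acceleration in the Media Foundation backend can presumably now be switched off on its own at configure time, e.g. with the standard CMake switch -DWITH_MSMF_DXVA=OFF; that command line is an illustration, not something spelled out in this commit.)
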
@@ -590,7 +593,7 @@ else()
     ocv_update(OPENCV_OTHER_INSTALL_PATH "${CMAKE_INSTALL_DATAROOTDIR}/opencv4")
     ocv_update(OPENCV_LICENSES_INSTALL_PATH "${CMAKE_INSTALL_DATAROOTDIR}/licenses/opencv4")
   endif()
-  ocv_update(OPENCV_PYTHON_INSTALL_PATH "python")
+  #ocv_update(OPENCV_PYTHON_INSTALL_PATH "python")  # no default value, see https://github.com/opencv/opencv/issues/13202
 endif()
 ocv_update(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${OPENCV_LIB_INSTALL_PATH}")

@@ -1447,6 +1450,7 @@ endif()
 if(WITH_MSMF OR HAVE_MSMF)
   status(" Media Foundation:" HAVE_MSMF THEN YES ELSE NO)
+  status(" DXVA:" HAVE_MSMF_DXVA THEN YES ELSE NO)
 endif()
 if(WITH_XIMEA OR HAVE_XIMEA)

cmake/OpenCVDetectCUDA.cmake

@@ -52,7 +52,7 @@ if(CUDA_FOUND)
   message(STATUS "CUDA detected: " ${CUDA_VERSION})
-  set(_generations "Fermi" "Kepler" "Maxwell" "Pascal" "Volta")
+  set(_generations "Fermi" "Kepler" "Maxwell" "Pascal" "Volta" "Turing")
   if(NOT CMAKE_CROSSCOMPILING)
     list(APPEND _generations "Auto")
   endif()

cmake/OpenCVFindLibsVideo.cmake

@@ -246,14 +246,15 @@ endif(WITH_DSHOW)
 ocv_clear_vars(HAVE_MSMF)
 if(WITH_MSMF)
   check_include_file(Mfapi.h HAVE_MSMF)
-  check_include_file(D3D11.h D3D11_found)
-  check_include_file(D3d11_4.h D3D11_4_found)
-  if(D3D11_found AND D3D11_4_found)
-    set(HAVE_DXVA YES)
-  else()
-    set(HAVE_DXVA NO)
-  endif()
+  set(HAVE_MSMF_DXVA "")
+  if(WITH_MSMF_DXVA)
+    check_include_file(D3D11.h D3D11_found)
+    check_include_file(D3d11_4.h D3D11_4_found)
+    if(D3D11_found AND D3D11_4_found)
+      set(HAVE_MSMF_DXVA YES)
+    endif()
+  endif()
 endif(WITH_MSMF)

 # --- Extra HighGUI and VideoIO libs on Windows ---
 if(WIN32)

cmake/OpenCVGenSetupVars.cmake

@@ -43,11 +43,24 @@ else()
 endif()
 file(RELATIVE_PATH OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG
      "${CMAKE_INSTALL_PREFIX}/${OPENCV_SETUPVARS_INSTALL_PATH}/" "${CMAKE_INSTALL_PREFIX}/")
-if(IS_ABSOLUTE "${OPENCV_PYTHON_INSTALL_PATH}")
-  set(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${OPENCV_PYTHON_INSTALL_PATH}")
-  message(WARNING "CONFIGURATION IS NOT SUPPORTED: validate setupvars script in install directory")
-else()
-  ocv_path_join(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG}" "${OPENCV_PYTHON_INSTALL_PATH}")
+if(DEFINED OPENCV_PYTHON_INSTALL_PATH)
+  set(__python_path "${OPENCV_PYTHON_INSTALL_PATH}")
+elseif(DEFINED OPENCV_PYTHON_INSTALL_PATH_SETUPVARS)
+  set(__python_path "${OPENCV_PYTHON_INSTALL_PATH_SETUPVARS}")
+endif()
+if(DEFINED __python_path)
+  if(IS_ABSOLUTE "${__python_path}")
+    set(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${__python_path}")
+    message(WARNING "CONFIGURATION IS NOT SUPPORTED: validate setupvars script in install directory")
+  else()
+    ocv_path_join(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG}" "${__python_path}")
+  endif()
+else()
+  if(DEFINED OPENCV_PYTHON3_INSTALL_PATH)
+    ocv_path_join(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG}" "${OPENCV_PYTHON3_INSTALL_PATH}")
+  else()
+    set(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "python_loader_is_not_installed")
+  endif()
 endif()
 configure_file("${OpenCV_SOURCE_DIR}/cmake/templates/${OPENCV_SETUPVARS_TEMPLATE}" "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/${OPENCV_SETUPVARS_FILENAME}" @ONLY)
 install(FILES "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/${OPENCV_SETUPVARS_FILENAME}"

doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown

@@ -65,7 +65,7 @@ that should be used to find the match.
 -# **Mask image (M):** The mask, a grayscale image that masks the template

-   - Only two matching methods currently accept a mask: CV_TM_SQDIFF and CV_TM_CCORR_NORMED (see
+   - Only two matching methods currently accept a mask: TM_SQDIFF and TM_CCORR_NORMED (see
    below for explanation of all the matching methods available in opencv).

@@ -86,23 +86,23 @@ that should be used to find the match.
 Good question. OpenCV implements Template matching in the function **matchTemplate()**. The
 available methods are 6:

--# **method=CV_TM_SQDIFF**
+-# **method=TM_SQDIFF**

     \f[R(x,y)= \sum _{x',y'} (T(x',y')-I(x+x',y+y'))^2\f]

--# **method=CV_TM_SQDIFF_NORMED**
+-# **method=TM_SQDIFF_NORMED**

     \f[R(x,y)= \frac{\sum_{x',y'} (T(x',y')-I(x+x',y+y'))^2}{\sqrt{\sum_{x',y'}T(x',y')^2 \cdot \sum_{x',y'} I(x+x',y+y')^2}}\f]

--# **method=CV_TM_CCORR**
+-# **method=TM_CCORR**

     \f[R(x,y)= \sum _{x',y'} (T(x',y') \cdot I(x+x',y+y'))\f]

--# **method=CV_TM_CCORR_NORMED**
+-# **method=TM_CCORR_NORMED**

     \f[R(x,y)= \frac{\sum_{x',y'} (T(x',y') \cdot I(x+x',y+y'))}{\sqrt{\sum_{x',y'}T(x',y')^2 \cdot \sum_{x',y'} I(x+x',y+y')^2}}\f]

--# **method=CV_TM_CCOEFF**
+-# **method=TM_CCOEFF**

     \f[R(x,y)= \sum _{x',y'} (T'(x',y') \cdot I'(x+x',y+y'))\f]
@@ -110,7 +110,7 @@ available methods are 6:

     \f[\begin{array}{l} T'(x',y')=T(x',y') - 1/(w \cdot h) \cdot \sum _{x'',y''} T(x'',y'') \\ I'(x+x',y+y')=I(x+x',y+y') - 1/(w \cdot h) \cdot \sum _{x'',y''} I(x+x'',y+y'') \end{array}\f]

--# **method=CV_TM_CCOEFF_NORMED**
+-# **method=TM_CCOEFF_NORMED**

     \f[R(x,y)= \frac{ \sum_{x',y'} (T'(x',y') \cdot I'(x+x',y+y')) }{ \sqrt{\sum_{x',y'}T'(x',y')^2 \cdot \sum_{x',y'} I'(x+x',y+y')^2} }\f]
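
These hunks only rename the tutorial's constants from the legacy CV_TM_* names to the TM_* enum values. A minimal Python sketch of template matching with the new names, for reference; the image file names below are placeholders, not files shipped with OpenCV:

```python
import cv2 as cv

# Placeholder inputs: any grayscale scene and a smaller template cut from it.
img = cv.imread('scene.png', cv.IMREAD_GRAYSCALE)
templ = cv.imread('template.png', cv.IMREAD_GRAYSCALE)

# TM_CCOEFF_NORMED: higher response means a better match
# (for TM_SQDIFF / TM_SQDIFF_NORMED the best match is the minimum instead).
res = cv.matchTemplate(img, templ, cv.TM_CCOEFF_NORMED)
min_val, max_val, min_loc, max_loc = cv.minMaxLoc(res)

h, w = templ.shape[:2]
top_left = max_loc
cv.rectangle(img, top_left, (top_left[0] + w, top_left[1] + h), 255, 2)
print('best match at', top_left, 'score %.3f' % max_val)
```
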

modules/core/include/opencv2/core/eigen.hpp

@@ -60,7 +60,7 @@ namespace cv
 //! @{

 template<typename _Tp, int _rows, int _cols, int _options, int _maxRows, int _maxCols> static inline
-void eigen2cv( const Eigen::Matrix<_Tp, _rows, _cols, _options, _maxRows, _maxCols>& src, Mat& dst )
+void eigen2cv( const Eigen::Matrix<_Tp, _rows, _cols, _options, _maxRows, _maxCols>& src, OutputArray dst )
 {
     if( !(src.Flags & Eigen::RowMajorBit) )
     {

modules/core/include/opencv2/core/hal/intrin_vsx.hpp

@@ -718,7 +718,7 @@ OPENCV_HAL_IMPL_VSX_REDUCE_OP_4(v_float32x4, vec_float4, float, min, vec_min)
 inline double v_reduce_sum(const v_float64x2& a)
 {
-    return vec_extract(vec_add(a.val, vec_sld(a.val, a.val, 8)), 0);
+    return vec_extract(vec_add(a.val, vec_permi(a.val, a.val, 3)), 0);
 }

 #define OPENCV_HAL_IMPL_VSX_REDUCE_OP_8(_Tpvec, _Tpvec2, scalartype, suffix, func) \

modules/core/src/ocl.cpp

@@ -1257,6 +1257,14 @@ struct Device::Impl
         else
             vendorID_ = UNKNOWN_VENDOR;

+        const size_t CV_OPENCL_DEVICE_MAX_WORK_GROUP_SIZE = utils::getConfigurationParameterSizeT("OPENCV_OPENCL_DEVICE_MAX_WORK_GROUP_SIZE", 0);
+        if (CV_OPENCL_DEVICE_MAX_WORK_GROUP_SIZE > 0)
+        {
+            const size_t new_maxWorkGroupSize = std::min(maxWorkGroupSize_, CV_OPENCL_DEVICE_MAX_WORK_GROUP_SIZE);
+            if (new_maxWorkGroupSize != maxWorkGroupSize_)
+                CV_LOG_WARNING(NULL, "OpenCL: using workgroup size: " << new_maxWorkGroupSize << " (was " << maxWorkGroupSize_ << ")");
+            maxWorkGroupSize_ = new_maxWorkGroupSize;
+        }
 #if 0
         if (isExtensionSupported("cl_khr_spir"))
         {
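
The new cap is read through utils::getConfigurationParameterSizeT, so it can be supplied via the environment. A minimal sketch of using it from Python; the value 64 is only an example and the variable has to be set before OpenCV initializes its OpenCL device:

```python
import os

# Example cap on the OpenCL workgroup size; must be in the environment
# before cv2 touches OpenCL for the first time.
os.environ['OPENCV_OPENCL_DEVICE_MAX_WORK_GROUP_SIZE'] = '64'

import cv2 as cv

print('OpenCL available:', cv.ocl.haveOpenCL())
```
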

modules/core/test/test_mat.cpp

@@ -3,6 +3,12 @@
 // of this distribution and at http://opencv.org/license.html.
 #include "test_precomp.hpp"

+#ifdef HAVE_EIGEN
+#include <Eigen/Core>
+#include <Eigen/Dense>
+#include "opencv2/core/eigen.hpp"
+#endif
+
 namespace opencv_test { namespace {

 class Core_ReduceTest : public cvtest::BaseTest

@@ -1962,4 +1968,22 @@ TEST(Core_Vectors, issue_13078_workaround)
     ASSERT_EQ(7, ints[3]);
 }

+#ifdef HAVE_EIGEN
+TEST(Core_Eigen, eigen2cv_check_Mat_type)
+{
+    Mat A(4, 4, CV_32FC1, Scalar::all(0));
+    Eigen::MatrixXf eigen_A;
+    cv2eigen(A, eigen_A);
+
+    Mat_<float> f_mat;
+    EXPECT_NO_THROW(eigen2cv(eigen_A, f_mat));
+    EXPECT_EQ(CV_32FC1, f_mat.type());
+
+    Mat_<double> d_mat;
+    EXPECT_ANY_THROW(eigen2cv(eigen_A, d_mat));
+    //EXPECT_EQ(CV_64FC1, d_mat.type());
+}
+#endif // HAVE_EIGEN
+
 }} // namespace

modules/imgproc/src/deriv.cpp

@@ -441,7 +441,7 @@ void cv::Sobel( InputArray _src, OutputArray _dst, int ddepth, int dx, int dy,
                ocl_sepFilter3x3_8UC1(_src, _dst, ddepth, kx, ky, delta, borderType));

     CV_OCL_RUN(ocl::isOpenCLActivated() && _dst.isUMat() && _src.dims() <= 2 && (size_t)_src.rows() > kx.total() && (size_t)_src.cols() > kx.total(),
-               ocl_sepFilter2D(_src, _dst, ddepth, kx, ky, Point(-1, -1), 0, borderType))
+               ocl_sepFilter2D(_src, _dst, ddepth, kx, ky, Point(-1, -1), delta, borderType))

     Mat src = _src.getMat();
     Mat dst = _dst.getMat();

@@ -494,7 +494,7 @@ void cv::Scharr( InputArray _src, OutputArray _dst, int ddepth, int dx, int dy,
     CV_OCL_RUN(ocl::isOpenCLActivated() && _dst.isUMat() && _src.dims() <= 2 &&
                (size_t)_src.rows() > kx.total() && (size_t)_src.cols() > kx.total(),
-               ocl_sepFilter2D(_src, _dst, ddepth, kx, ky, Point(-1, -1), 0, borderType))
+               ocl_sepFilter2D(_src, _dst, ddepth, kx, ky, Point(-1, -1), delta, borderType))

     Mat src = _src.getMat();
     Mat dst = _dst.getMat();

modules/python/CMakeLists.txt

@@ -20,6 +20,7 @@ add_subdirectory(bindings)
 if(NOT OPENCV_SKIP_PYTHON_LOADER)
   include("./python_loader.cmake")
+  message(STATUS "OpenCV Python: during development append to PYTHONPATH: ${CMAKE_BINARY_DIR}/python_loader")
 endif()

 if(__disable_python2)

modules/python/common.cmake

@@ -120,6 +120,21 @@ if(NOT OPENCV_SKIP_PYTHON_LOADER)
   set(__python_loader_subdir "cv2/")
 endif()

+if(NOT " ${PYTHON}" STREQUAL " PYTHON"
+    AND NOT DEFINED OPENCV_PYTHON_INSTALL_PATH
+)
+  if(DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
+    set(OPENCV_PYTHON_INSTALL_PATH "${OPENCV_${PYTHON}_INSTALL_PATH}")
+  elseif(NOT OPENCV_SKIP_PYTHON_LOADER)
+    set(OPENCV_PYTHON_INSTALL_PATH "${${PYTHON}_PACKAGES_PATH}")
+  endif()
+endif()
+
+if(NOT OPENCV_SKIP_PYTHON_LOADER AND DEFINED OPENCV_PYTHON_INSTALL_PATH)
+  include("${CMAKE_CURRENT_LIST_DIR}/python_loader.cmake")
+  set(OPENCV_PYTHON_INSTALL_PATH_SETUPVARS "${OPENCV_PYTHON_INSTALL_PATH}" CACHE INTERNAL "")
+endif()
+
 if(NOT " ${PYTHON}" STREQUAL " PYTHON" AND DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
   set(__python_binary_install_path "${OPENCV_${PYTHON}_INSTALL_PATH}")
 elseif(OPENCV_SKIP_PYTHON_LOADER AND DEFINED ${PYTHON}_PACKAGES_PATH)

modules/python/python_loader.cmake

@@ -2,14 +2,15 @@ ocv_assert(NOT OPENCV_SKIP_PYTHON_LOADER)
 set(PYTHON_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}")

-ocv_assert(DEFINED OPENCV_PYTHON_INSTALL_PATH)
 if(OpenCV_FOUND)
   set(__loader_path "${OpenCV_BINARY_DIR}/python_loader")
+  message(STATUS "OpenCV Python: during development append to PYTHONPATH: ${__loader_path}")
 else()
   set(__loader_path "${CMAKE_BINARY_DIR}/python_loader")
 endif()

 set(__python_loader_install_tmp_path "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/python_loader/")
+if(DEFINED OPENCV_PYTHON_INSTALL_PATH)
 if(IS_ABSOLUTE "${OPENCV_PYTHON_INSTALL_PATH}")
   set(OpenCV_PYTHON_INSTALL_PATH_RELATIVE_CONFIGCMAKE "${CMAKE_INSTALL_PREFIX}/")
   set(CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE "'${CMAKE_INSTALL_PREFIX}'")

@@ -17,6 +18,9 @@ else()
   file(RELATIVE_PATH OpenCV_PYTHON_INSTALL_PATH_RELATIVE_CONFIGCMAKE "${CMAKE_INSTALL_PREFIX}/${OPENCV_PYTHON_INSTALL_PATH}/cv2" ${CMAKE_INSTALL_PREFIX})
   set(CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE "os.path.join(LOADER_DIR, '${OpenCV_PYTHON_INSTALL_PATH_RELATIVE_CONFIGCMAKE}')")
 endif()
+else()
+  set(CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE "os.path.join(LOADER_DIR, 'not_installed')")
+endif()

 set(PYTHON_LOADER_FILES
     "setup.py" "cv2/__init__.py"

@@ -25,7 +29,13 @@ set(PYTHON_LOADER_FILES
 foreach(fname ${PYTHON_LOADER_FILES})
   get_filename_component(__dir "${fname}" DIRECTORY)
   file(COPY "${PYTHON_SOURCE_DIR}/package/${fname}" DESTINATION "${__loader_path}/${__dir}")
+  if(fname STREQUAL "setup.py")
+    if(OPENCV_PYTHON_SETUP_PY_INSTALL_PATH)
+      install(FILES "${PYTHON_SOURCE_DIR}/package/${fname}" DESTINATION "${OPENCV_PYTHON_SETUP_PY_INSTALL_PATH}" COMPONENT python)
+    endif()
+  elseif(DEFINED OPENCV_PYTHON_INSTALL_PATH)
     install(FILES "${PYTHON_SOURCE_DIR}/package/${fname}" DESTINATION "${OPENCV_PYTHON_INSTALL_PATH}/${__dir}" COMPONENT python)
+  endif()
 endforeach()

 if(NOT OpenCV_FOUND) # Ignore "standalone" builds of Python bindings

@@ -41,6 +51,8 @@ if(NOT OpenCV_FOUND) # Ignore "standalone" builds of Python bindings
   string(REPLACE ";" ",\n    " CMAKE_PYTHON_BINARIES_PATH "${CMAKE_PYTHON_BINARIES_PATH}")
   configure_file("${PYTHON_SOURCE_DIR}/package/template/config.py.in" "${__loader_path}/cv2/config.py" @ONLY)

+  # install
+  if(DEFINED OPENCV_PYTHON_INSTALL_PATH)
   if(WIN32)
     list(APPEND CMAKE_PYTHON_BINARIES_INSTALL_PATH "os.path.join(${CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE}, '${OPENCV_BIN_INSTALL_PATH}')")
   else()

@@ -49,6 +61,5 @@ if(NOT OpenCV_FOUND) # Ignore "standalone" builds of Python bindings
   string(REPLACE ";" ",\n    " CMAKE_PYTHON_BINARIES_PATH "${CMAKE_PYTHON_BINARIES_INSTALL_PATH}")
   configure_file("${PYTHON_SOURCE_DIR}/package/template/config.py.in" "${__python_loader_install_tmp_path}/cv2/config.py" @ONLY)
   install(FILES "${__python_loader_install_tmp_path}/cv2/config.py" DESTINATION "${OPENCV_PYTHON_INSTALL_PATH}/cv2/" COMPONENT python)
+  endif()
-  message(STATUS "OpenCV Python: during development append to PYTHONPATH: ${__loader_path}")
 endif()

modules/python/standalone.cmake

@@ -3,7 +3,13 @@ if(NOT DEFINED OpenCV_BINARY_DIR)
 endif()
 include("${OpenCV_BINARY_DIR}/opencv_python_config.cmake")
 if(NOT DEFINED OpenCV_SOURCE_DIR)
-  message(FATAL_ERROR "Missing define of OpenCV_SOURCE_DIR")
+  message(FATAL_ERROR "Missing OpenCV_SOURCE_DIR")
+endif()
+if(NOT OPENCV_PYTHON_INSTALL_PATH)
+  if(NOT DEFINED OPENCV_PYTHON_STANDALONE_INSTALL_PATH)
+    message(FATAL_ERROR "Missing OPENCV_PYTHON_STANDALONE_INSTALL_PATH / OPENCV_PYTHON_INSTALL_PATH")
+  endif()
+  set(OPENCV_PYTHON_INSTALL_PATH "${OPENCV_PYTHON_STANDALONE_INSTALL_PATH}")
 endif()

 include("${OpenCV_SOURCE_DIR}/cmake/OpenCVUtils.cmake")

modules/stitching/perf/perf_stich.cpp

@@ -13,6 +13,7 @@ using namespace perf;
 #define WORK_MEGAPIX 0.6

 typedef TestBaseWithParam<string> stitch;
+typedef TestBaseWithParam<int> stitchExposureCompensation;
 typedef TestBaseWithParam<tuple<string, string> > stitchDatasets;

 #ifdef HAVE_OPENCV_XFEATURES2D

@@ -20,6 +21,7 @@ typedef TestBaseWithParam<tuple<string, string> > stitchDatasets;
 #else
 #define TEST_DETECTORS testing::Values("orb", "akaze")
 #endif
+#define TEST_EXP_COMP_BS testing::Values(32, 16, 12, 10, 8)
 #define AFFINE_DATASETS testing::Values("s", "budapest", "newspaper", "prague")

 PERF_TEST_P(stitch, a123, TEST_DETECTORS)
@@ -58,6 +60,38 @@ PERF_TEST_P(stitch, a123, TEST_DETECTORS)
     SANITY_CHECK_NOTHING();
 }

+PERF_TEST_P(stitchExposureCompensation, a123, TEST_EXP_COMP_BS)
+{
+    Mat pano;
+
+    vector<Mat> imgs;
+    imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
+    imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
+    imgs.push_back( imread( getDataPath("stitching/a3.png") ) );
+
+    int bs = GetParam();
+
+    declare.time(30 * 10).iterations(10);
+
+    while(next())
+    {
+        Ptr<Stitcher> stitcher = Stitcher::create();
+        stitcher->setWarper(makePtr<SphericalWarper>());
+        stitcher->setRegistrationResol(WORK_MEGAPIX);
+        stitcher->setExposureCompensator(
+            makePtr<detail::BlocksGainCompensator>(bs, bs));
+
+        startTimer();
+        stitcher->stitch(imgs, pano);
+        stopTimer();
+    }
+
+    EXPECT_NEAR(pano.size().width, 1182, 50);
+    EXPECT_NEAR(pano.size().height, 682, 30);
+
+    SANITY_CHECK_NOTHING();
+}
+
 PERF_TEST_P(stitch, b12, TEST_DETECTORS)
 {
     Mat pano;
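
The new perf test drives BlocksGainCompensator through the C++ Stitcher API. For orientation, a rough Python sketch of the same high-level pipeline; the image names are placeholders, and the block-size knob exercised by the test is set through the C++ API, so it is not assumed to be reachable from this minimal sketch:

```python
import cv2 as cv

# Placeholder overlapping input images.
imgs = [cv.imread(name) for name in ('a1.png', 'a2.png', 'a3.png')]

stitcher = cv.Stitcher_create()          # default PANORAMA mode
status, pano = stitcher.stitch(imgs)

if status == 0:                          # 0 == Stitcher::OK
    cv.imwrite('pano.png', pano)
else:
    print('stitching failed, status =', status)
```
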

modules/stitching/src/exposure_compensate.cpp

@@ -41,6 +41,10 @@
 //M*/

 #include "precomp.hpp"
+#ifdef HAVE_EIGEN
+#include <Eigen/Core>
+#include <Eigen/Dense>
+#endif

 namespace cv {
 namespace detail {

@@ -86,6 +90,7 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<
     const int num_images = static_cast<int>(images.size());
     Mat_<int> N(num_images, num_images); N.setTo(0);
     Mat_<double> I(num_images, num_images); I.setTo(0);
+    Mat_<bool> skip(num_images, 1); skip.setTo(true);

     //Rect dst_roi = resultRoi(corners, images);
     Mat subimg1, subimg2;

@@ -105,7 +110,19 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<
             submask2 = masks[j].first(Rect(roi.tl() - corners[j], roi.br() - corners[j])).getMat(ACCESS_READ);
             intersect = (submask1 == masks[i].second) & (submask2 == masks[j].second);

-            N(i, j) = N(j, i) = std::max(1, countNonZero(intersect));
+            int intersect_count = countNonZero(intersect);
+            N(i, j) = N(j, i) = std::max(1, intersect_count);
+
+            // Don't compute Isums if subimages do not intersect anyway
+            if (intersect_count == 0)
+                continue;
+
+            // Don't skip images that intersect with at least one other image
+            if (i != j)
+            {
+                skip(i, 0) = false;
+                skip(j, 0) = false;
+            }

             double Isum1 = 0, Isum2 = 0;
             for (int y = 0; y < roi.height; ++y)
@@ -130,22 +147,62 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<
     {
         double alpha = 0.01;
         double beta = 100;
+        int num_eq = num_images - countNonZero(skip);

-        Mat_<double> A(num_images, num_images); A.setTo(0);
-        Mat_<double> b(num_images, 1); b.setTo(0);
-        for (int i = 0; i < num_images; ++i)
+        Mat_<double> A(num_eq, num_eq); A.setTo(0);
+        Mat_<double> b(num_eq, 1); b.setTo(0);
+        for (int i = 0, ki = 0; i < num_images; ++i)
         {
-            for (int j = 0; j < num_images; ++j)
+            if (skip(i, 0))
+                continue;
+
+            for (int j = 0, kj = 0; j < num_images; ++j)
             {
-                b(i, 0) += beta * N(i, j);
-                A(i, i) += beta * N(i, j);
-                if (j == i) continue;
-                A(i, i) += 2 * alpha * I(i, j) * I(i, j) * N(i, j);
-                A(i, j) -= 2 * alpha * I(i, j) * I(j, i) * N(i, j);
+                if (skip(j, 0))
+                    continue;
+
+                b(ki, 0) += beta * N(i, j);
+                A(ki, ki) += beta * N(i, j);
+                if (j != i)
+                {
+                    A(ki, ki) += 2 * alpha * I(i, j) * I(i, j) * N(i, j);
+                    A(ki, kj) -= 2 * alpha * I(i, j) * I(j, i) * N(i, j);
+                }
+                ++kj;
             }
+            ++ki;
         }

-        solve(A, b, gains_);
+        Mat_<double> l_gains;
+#ifdef HAVE_EIGEN
+        Eigen::MatrixXf eigen_A, eigen_b, eigen_x;
+        cv2eigen(A, eigen_A);
+        cv2eigen(b, eigen_b);
+
+        Eigen::LLT<Eigen::MatrixXf> solver(eigen_A);
+#if ENABLE_LOG
+        if (solver.info() != Eigen::ComputationInfo::Success)
+            LOGLN("Failed to solve exposure compensation system");
+#endif
+        eigen_x = solver.solve(eigen_b);
+
+        Mat_<float> l_gains_float;
+        eigen2cv(eigen_x, l_gains_float);
+        l_gains_float.convertTo(l_gains, CV_64FC1);
+#else
+        solve(A, b, l_gains);
+#endif
+        CV_CheckTypeEQ(l_gains.type(), CV_64FC1, "");
+
+        gains_.create(num_images, 1);
+        for (int i = 0, j = 0; i < num_images; ++i)
+        {
+            if (skip(i, 0))
+                gains_.at<double>(i, 0) = 1;
+            else
+                gains_.at<double>(i, 0) = l_gains(j++, 0);
+        }
     }

     LOGLN("Exposure compensation, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");

modules/videoio/CMakeLists.txt

@@ -86,8 +86,8 @@ endif()
 if(WIN32 AND HAVE_MSMF)
   list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_msmf.hpp)
   list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_msmf.cpp)
-  if (HAVE_DXVA)
-    add_definitions(-DHAVE_DXVA)
+  if(HAVE_MSMF_DXVA)
+    add_definitions(-DHAVE_MSMF_DXVA)
   endif()
 endif()

modules/videoio/src/cap_dc1394_v2.cpp

@@ -183,7 +183,11 @@ CvDC1394::~CvDC1394()
     dc = 0;
 }

-static CvDC1394 dc1394;
+static CvDC1394& getDC1394()
+{
+    static CvDC1394 dc1394;
+    return dc1394;
+}

 class CvCaptureCAM_DC1394_v2_CPP : public CvCapture
 {

@@ -436,7 +440,7 @@ bool CvCaptureCAM_DC1394_v2_CPP::startCapture()
         code = dc1394_capture_setup(dcCam, nDMABufs, DC1394_CAPTURE_FLAGS_DEFAULT);
         if (code >= 0)
         {
-            FD_SET(dc1394_capture_get_fileno(dcCam), &dc1394.camFds);
+            FD_SET(dc1394_capture_get_fileno(dcCam), &getDC1394().camFds);
             dc1394_video_set_transmission(dcCam, DC1394_ON);
             started = true;
         }

@@ -452,15 +456,15 @@ bool CvCaptureCAM_DC1394_v2_CPP::open(int index)
     close();

-    if (!dc1394.dc)
+    if (!getDC1394().dc)
        goto _exit_;

-    err = dc1394_camera_enumerate(dc1394.dc, &cameraList);
+    err = dc1394_camera_enumerate(getDC1394().dc, &cameraList);
     if (err < 0 || !cameraList || (unsigned)index >= (unsigned)cameraList->num)
        goto _exit_;

     guid = cameraList->ids[index].guid;
-    dcCam = dc1394_camera_new(dc1394.dc, guid);
+    dcCam = dc1394_camera_new(getDC1394().dc, guid);
     if (!dcCam)
        goto _exit_;

@@ -485,8 +489,8 @@ void CvCaptureCAM_DC1394_v2_CPP::close()
         // check for fileno valid before using
         int fileno=dc1394_capture_get_fileno(dcCam);

-        if (fileno>=0 && FD_ISSET(fileno, &dc1394.camFds))
-            FD_CLR(fileno, &dc1394.camFds);
+        if (fileno>=0 && FD_ISSET(fileno, &getDC1394().camFds))
+            FD_CLR(fileno, &getDC1394().camFds);

         dc1394_video_set_transmission(dcCam, DC1394_OFF);
         dc1394_capture_stop(dcCam);
         dc1394_camera_free(dcCam);

modules/videoio/src/cap_msmf.cpp

@@ -55,15 +55,15 @@
 #include <windows.h>
 #include <guiddef.h>
 #include <mfidl.h>
-#include <Mfapi.h>
+#include <mfapi.h>
 #include <mfplay.h>
 #include <mfobjects.h>
 #include <tchar.h>
 #include <strsafe.h>
 #include <Mfreadwrite.h>
-#ifdef HAVE_DXVA
-#include <D3D11.h>
-#include <D3d11_4.h>
+#ifdef HAVE_MSMF_DXVA
+#include <d3d11.h>
+#include <d3d11_4.h>
 #endif
 #include <new>
 #include <map>

@@ -81,7 +81,7 @@
 #pragma comment(lib, "mfuuid")
 #pragma comment(lib, "Strmiids")
 #pragma comment(lib, "Mfreadwrite")
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
 #pragma comment(lib, "d3d11")
 // MFCreateDXGIDeviceManager() is available since Win8 only.
 // To avoid OpenCV loading failure on Win7 use dynamic detection of this symbol.

@@ -715,7 +715,7 @@ protected:
     cv::String filename;
     int camid;
     MSMFCapture_Mode captureMode;
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
     _ComPtr<ID3D11Device> D3DDev;
     _ComPtr<IMFDXGIDeviceManager> D3DMgr;
 #endif

@@ -740,7 +740,7 @@ CvCapture_MSMF::CvCapture_MSMF():
     filename(""),
     camid(-1),
     captureMode(MODE_SW),
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
     D3DDev(NULL),
     D3DMgr(NULL),
 #endif

@@ -779,7 +779,7 @@ void CvCapture_MSMF::close()

 bool CvCapture_MSMF::configureHW(bool enable)
 {
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
     if ((enable && D3DMgr && D3DDev) || (!enable && !D3DMgr && !D3DDev))
         return true;
     if (!pMFCreateDXGIDeviceManager_initialized)

@@ -976,7 +976,7 @@ bool CvCapture_MSMF::open(int _index)
             SUCCEEDED(srAttr->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, FALSE)) &&
             SUCCEEDED(srAttr->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE)))
         {
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
             if (D3DMgr)
                 srAttr->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, D3DMgr.Get());
 #endif

@@ -1027,7 +1027,7 @@ bool CvCapture_MSMF::open(const cv::String& _filename)
             SUCCEEDED(srAttr->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, true))
             )
         {
-#ifdef HAVE_DXVA
+#ifdef HAVE_MSMF_DXVA
             if(D3DMgr)
                 srAttr->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, D3DMgr.Get());
 #endif

samples/dnn/text_detection.py (new file)

@@ -0,0 +1,146 @@
# Import required modules
import cv2 as cv
import math
import argparse

############ Add argument parser for command line arguments ############
parser = argparse.ArgumentParser(description='Use this script to run TensorFlow implementation (https://github.com/argman/EAST) of EAST: An Efficient and Accurate Scene Text Detector (https://arxiv.org/abs/1704.03155v2)')
parser.add_argument('--input', help='Path to input image or video file. Skip this argument to capture frames from a camera.')
parser.add_argument('--model', required=True,
                    help='Path to a binary .pb file of model contains trained weights.')
parser.add_argument('--width', type=int, default=320,
                    help='Preprocess input image by resizing to a specific width. It should be multiple by 32.')
parser.add_argument('--height', type=int, default=320,
                    help='Preprocess input image by resizing to a specific height. It should be multiple by 32.')
parser.add_argument('--thr', type=float, default=0.5,
                    help='Confidence threshold.')
parser.add_argument('--nms', type=float, default=0.4,
                    help='Non-maximum suppression threshold.')
args = parser.parse_args()

############ Utility functions ############
def decode(scores, geometry, scoreThresh):
    detections = []
    confidences = []

    ############ CHECK DIMENSIONS AND SHAPES OF geometry AND scores ############
    assert len(scores.shape) == 4, "Incorrect dimensions of scores"
    assert len(geometry.shape) == 4, "Incorrect dimensions of geometry"
    assert scores.shape[0] == 1, "Invalid dimensions of scores"
    assert geometry.shape[0] == 1, "Invalid dimensions of geometry"
    assert scores.shape[1] == 1, "Invalid dimensions of scores"
    assert geometry.shape[1] == 5, "Invalid dimensions of geometry"
    assert scores.shape[2] == geometry.shape[2], "Invalid dimensions of scores and geometry"
    assert scores.shape[3] == geometry.shape[3], "Invalid dimensions of scores and geometry"
    height = scores.shape[2]
    width = scores.shape[3]
    for y in range(0, height):
        # Extract data from scores
        scoresData = scores[0][0][y]
        x0_data = geometry[0][0][y]
        x1_data = geometry[0][1][y]
        x2_data = geometry[0][2][y]
        x3_data = geometry[0][3][y]
        anglesData = geometry[0][4][y]
        for x in range(0, width):
            score = scoresData[x]

            # If score is lower than threshold score, move to next x
            if(score < scoreThresh):
                continue

            # Calculate offset
            offsetX = x * 4.0
            offsetY = y * 4.0
            angle = anglesData[x]

            # Calculate cos and sin of angle
            cosA = math.cos(angle)
            sinA = math.sin(angle)
            h = x0_data[x] + x2_data[x]
            w = x1_data[x] + x3_data[x]

            # Calculate offset
            offset = ([offsetX + cosA * x1_data[x] + sinA * x2_data[x], offsetY - sinA * x1_data[x] + cosA * x2_data[x]])

            # Find points for rectangle
            p1 = (-sinA * h + offset[0], -cosA * h + offset[1])
            p3 = (-cosA * w + offset[0], sinA * w + offset[1])
            center = (0.5*(p1[0]+p3[0]), 0.5*(p1[1]+p3[1]))
            detections.append((center, (w,h), -1*angle * 180.0 / math.pi))
            confidences.append(float(score))

    # Return detections and confidences
    return [detections, confidences]


def main():
    # Read and store arguments
    confThreshold = args.thr
    nmsThreshold = args.nms
    inpWidth = args.width
    inpHeight = args.height
    model = args.model

    # Load network
    net = cv.dnn.readNet(model)

    # Create a new named window
    kWinName = "EAST: An Efficient and Accurate Scene Text Detector"
    cv.namedWindow(kWinName, cv.WINDOW_NORMAL)
    outNames = []
    outNames.append("feature_fusion/Conv_7/Sigmoid")
    outNames.append("feature_fusion/concat_3")

    # Open a video file or an image file or a camera stream
    cap = cv.VideoCapture(args.input if args.input else 0)

    while cv.waitKey(1) < 0:
        # Read frame
        hasFrame, frame = cap.read()
        if not hasFrame:
            cv.waitKey()
            break

        # Get frame height and width
        height_ = frame.shape[0]
        width_ = frame.shape[1]
        rW = width_ / float(inpWidth)
        rH = height_ / float(inpHeight)

        # Create a 4D blob from frame.
        blob = cv.dnn.blobFromImage(frame, 1.0, (inpWidth, inpHeight), (123.68, 116.78, 103.94), True, False)

        # Run the model
        net.setInput(blob)
        outs = net.forward(outNames)
        t, _ = net.getPerfProfile()
        label = 'Inference time: %.2f ms' % (t * 1000.0 / cv.getTickFrequency())

        # Get scores and geometry
        scores = outs[0]
        geometry = outs[1]
        [boxes, confidences] = decode(scores, geometry, confThreshold)

        # Apply NMS
        indices = cv.dnn.NMSBoxesRotated(boxes, confidences, confThreshold, nmsThreshold)
        for i in indices:
            # get 4 corners of the rotated rect
            vertices = cv.boxPoints(boxes[i[0]])
            # scale the bounding box coordinates based on the respective ratios
            for j in range(4):
                vertices[j][0] *= rW
                vertices[j][1] *= rH
            for j in range(4):
                p1 = (vertices[j][0], vertices[j][1])
                p2 = (vertices[(j + 1) % 4][0], vertices[(j + 1) % 4][1])
                cv.line(frame, p1, p2, (0, 255, 0), 1)

        # Put efficiency information
        cv.putText(frame, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0))

        # Display the frame
        cv.imshow(kWinName, frame)


if __name__ == "__main__":
    main()
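
Assuming a frozen EAST TensorFlow graph has been downloaded separately (frozen_east_text_detection.pb is the commonly used file name, not one fixed by this sample), the script would typically be run as: python text_detection.py --model frozen_east_text_detection.pb --input image.jpg --width 320 --height 320. Omitting --input makes it read from the default camera, per the argparse defaults above.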