Access violation writing location in opencv_world (HoloLensForCV, opencv, aruco)

#opencv3.0 #access-violation #hololens #aruco

Question:

I need your help. I have a problem in my code and I don't understand why… Do you have any ideas? I'm new to opencv…
I'm trying to detect an aruco grid board so I can get its rvec and tvec, but when I run my app on the HoloLens I get this error: Exception thrown at 0x7077C6B6 (opencv_world3411.dll) in GnathiX.exe: 0xC0000005: Access violation writing location 0x00000000. See this image for the error. My code is below; after it I have also added a minimal plain-OpenCV sketch of the flow I think I am reproducing.

```cpp
int _nbmx = 2;
int _nbmy = 2;
double _separation = 0.002;
float separation = (float)_separation;

// Clear the prior interface vector containing
// detected aruco markers
Windows::Foundation::Collections::IVector<DetectedArUcoMarker^>^ detectedMarkers
    = ref new Platform::Collections::Vector<DetectedArUcoMarker^>();

// If null sensor frame, return zero detections
if (pvSensorFrame == nullptr)
{
    DetectedArUcoMarker^ zeroMarker = ref new DetectedArUcoMarker(
        0,
        Windows::Foundation::Numerics::float3::zero(),
        Windows::Foundation::Numerics::float3::zero(),
        Windows::Foundation::Numerics::float4x4::identity());

    detectedMarkers->Append(zeroMarker);
    return detectedMarkers;
}
else
{
    // https://docs.opencv.org/4.1.1/d5/dae/tutorial_aruco_detection.html
    cv::Mat wrappedMat;
    std::vector<std::vector<cv::Point2f>> markers, rejectedCandidates;
    std::vector<int32_t> markerIds;

    // Create the aruco dictionary from id
    cv::Ptr<cv::aruco::Dictionary> dictionary =
        cv::aruco::getPredefinedDictionary(_dictId);

    // Create detector parameters
    cv::Ptr<cv::aruco::DetectorParameters> detectorParams
        = cv::aruco::DetectorParameters::create();

    // Use wrapper method to get cv::Mat from sensor frame
    // Can I directly stream gray frames from pv camera?
    Utils::WrapHoloLensSensorFrameWithCvMat(pvSensorFrame, wrappedMat);

    // Convert cv::Mat to grayscale for detection
    cv::Mat grayMat;
    cv::cvtColor(wrappedMat, grayMat, cv::COLOR_BGRA2GRAY);

    // Create board
    cv::Ptr<cv::aruco::GridBoard> Marker1 = cv::aruco::GridBoard::create(_nbmx, _nbmy, _markerSize, separation, dictionary, 0);
    //cv::Ptr<cv::aruco::Board> Marker1 = Marker1.staticCast<cv::aruco::Board>();

    // Detect markers
    cv::aruco::detectMarkers(
        grayMat,
        dictionary,
        markers,
        markerIds,
        detectorParams,
        rejectedCandidates);

    dbg::trace(
        L"ArUcoMarkerTracker::DetectArUcoMarkersInFrame: %i markers found",
        markerIds.size());

    // If we have detected markers, compute the transform
    // to relate WinRT (right-handed row-vector) and Unity
    // (left-handed column-vector) representations for transforms
    // WinRT transform -> Unity transform by transpose and flip z values
    if (!markerIds.empty())
    {
        // Get transform to relate unity coordinate system to camera (cameraToUnity)
        // https://github.com/chrisfromwork/HoloLensForCV/blob/master/Samples/UnityArUcoMarkerDetectorPlugin/ArUcoMarkerDetector.cpp
        Platform::IBox<Windows::Foundation::Numerics::float4x4>^ cameraToUnityReference =
            pvSensorFrame->FrameCoordinateSystem->TryGetTransformTo(_unitySpatialCoordinateSystem);

        if (!cameraToUnityReference)
        {
            dbg::trace(L"Failed to obtain transform to unity coordinate space.");
            DetectedArUcoMarker^ marker = ref new DetectedArUcoMarker(
                0,
                Windows::Foundation::Numerics::float3::zero(),
                Windows::Foundation::Numerics::float3::zero(),
                Windows::Foundation::Numerics::float4x4::identity());

            // Add the marker to interface vector of markers
            detectedMarkers->Append(marker);
            return detectedMarkers;

            dbg::trace(L"Etape 2");
        }

        // Get camera to unity transformation matrix
        Windows::Foundation::Numerics::float4x4 cameraToUnity = cameraToUnityReference->Value;
        dbg::trace(L"Etape 3");

        // Get transform to relate view coordinate system to camera coordinate system (viewToCamera)
        Windows::Foundation::Numerics::float4x4 viewToCamera;
        Windows::Foundation::Numerics::invert(pvSensorFrame->CameraViewTransform, &viewToCamera);

        // Compute transform to relate the WinRT coordinate frame
        // with the Unity coordinate frame (viewToUnity)
        // WinRT transform -> Unity transform by transpose and flip z values
        // https://github.com/chrisfromwork/HoloLensForCV/blob/master/Samples/UnityArUcoMarkerDetectorPlugin/ArUcoMarkerDetector.cpp
        Windows::Foundation::Numerics::float4x4 viewToUnityWinRT = viewToCamera * cameraToUnity;
        Windows::Foundation::Numerics::float4x4 viewToUnity =
            Windows::Foundation::Numerics::transpose(viewToUnityWinRT);
        viewToUnity.m31 *= -1.0f;
        viewToUnity.m32 *= -1.0f;
        viewToUnity.m33 *= -1.0f;
        viewToUnity.m34 *= -1.0f;
        dbg::trace(L"Etape 4");

        // Set camera intrinsic parameters for aruco based pose estimation
        cv::Mat cameraMatrix(3, 3, CV_64F, cv::Scalar(0));
        cameraMatrix.at<double>(0, 0) = pvSensorFrame->CoreCameraIntrinsics->FocalLength.x;
        cameraMatrix.at<double>(0, 2) = pvSensorFrame->CoreCameraIntrinsics->PrincipalPoint.x;
        cameraMatrix.at<double>(1, 1) = pvSensorFrame->CoreCameraIntrinsics->FocalLength.y;
        cameraMatrix.at<double>(1, 2) = pvSensorFrame->CoreCameraIntrinsics->PrincipalPoint.y;
        cameraMatrix.at<double>(2, 2) = 1.0;
        dbg::trace(L"Etape 5");

        // Set distortion matrix for aruco based pose estimation
        cv::Mat distortionCoefficientsMatrix(1, 5, CV_64F);
        distortionCoefficientsMatrix.at<double>(0, 0) = pvSensorFrame->CoreCameraIntrinsics->RadialDistortion.x;
        distortionCoefficientsMatrix.at<double>(0, 1) = pvSensorFrame->CoreCameraIntrinsics->RadialDistortion.y;
        distortionCoefficientsMatrix.at<double>(0, 2) = pvSensorFrame->CoreCameraIntrinsics->TangentialDistortion.x;
        distortionCoefficientsMatrix.at<double>(0, 3) = pvSensorFrame->CoreCameraIntrinsics->TangentialDistortion.y;
        distortionCoefficientsMatrix.at<double>(0, 4) = pvSensorFrame->CoreCameraIntrinsics->RadialDistortion.z;
        dbg::trace(L"Etape 6");

        // Vectors for pose (translation and rotation) estimation
        std::vector<cv::Vec3d> rVecs;
        std::vector<cv::Vec3d> tVecs;
        dbg::trace(L"Etape 7");

        cv::aruco::refineDetectedMarkers(
            grayMat,
            Marker1,
            markers,
            markerIds,
            rejectedCandidates,
            cameraMatrix,
            distortionCoefficientsMatrix);
        dbg::trace(L"refinedDetectedMarker");

        // Estimate pose of single markers
        /* cv::aruco::estimatePoseSingleMarkers(
            markers,
            _markerSize,
            cameraMatrix,
            distortionCoefficientsMatrix,
            rVecs,
            tVecs);
        dbg::trace(L"Estimation pose"); */

        cv::setBreakOnError(true);
        cv::aruco::estimatePoseBoard(
            markers,
            markerIds,
            Marker1,
            cameraMatrix,
            distortionCoefficientsMatrix,
            rVecs,
            tVecs);

        dbg::trace(L"Estimate poseboard");

        cv::setBreakOnError(true);
        ////double lenght = 0.02;
        ////float lenghtf = (float)lenght;

        //int i = 1;
        //if (markerOfBoardDetected > 0)
        ////cv::aruco::drawAxis(grayMat, cameraMatrix, distortionCoefficientsMatrix, rVecs, tVecs, lenghtf);
        //{
        //    DetectedArUcoMarker^ marker = ref new DetectedArUcoMarker(
        //        i,
        //        Windows::Foundation::Numerics::float3((float)tVecs[i][0], (float)tVecs[i][1], (float)tVecs[i][2]),
        //        Windows::Foundation::Numerics::float3((float)rVecs[i][0], (float)rVecs[i][1], (float)rVecs[i][2]),
        //        viewToUnity);
        //    detectedMarkers->Append(marker);
        //}

        //dbg::trace(
        //    L"Pose et rota dans matrix pour unity");

        // Iterate across the detected marker ids and cache information of
        // pose of each marker as well as marker id
        //for (size_t i = 0; i < markerIds.size(); i++)
        //{
        //    // Create marker WinRT marker class instance with current
        //    // detected marker parameters and view to unity transform
        //    DetectedArUcoMarker^ marker = ref new DetectedArUcoMarker(
        //        markerIds[i],
        //        Windows::Foundation::Numerics::float3((float)tVecs[i][0], (float)tVecs[i][1], (float)tVecs[i][2]),
        //        Windows::Foundation::Numerics::float3((float)rVecs[i][0], (float)rVecs[i][1], (float)rVecs[i][2]),
        //        viewToUnity);

        //    // Add the marker to interface vector of markers
        //    detectedMarkers->Append(marker);
        //}
    }

    return detectedMarkers;
}
}
}
```
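For reference, here is the minimal plain-OpenCV GridBoard flow I think I'm reproducing, based on the aruco tutorial linked in the code. This is just a sketch for a desktop OpenCV 3.4 build: the image path, dictionary choice, board geometry and intrinsics are placeholders, not values from my HoloLens app.

```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/aruco.hpp>

#include <vector>

int main()
{
    // Placeholder input image; on the HoloLens this comes from the PV sensor frame
    cv::Mat image = cv::imread("frame.png");
    if (image.empty())
        return 1;

    cv::Mat gray;
    cv::cvtColor(image, gray, cv::COLOR_BGR2GRAY);

    // 2x2 grid board; marker size and separation in meters (placeholder values)
    cv::Ptr<cv::aruco::Dictionary> dictionary =
        cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
    cv::Ptr<cv::aruco::GridBoard> board =
        cv::aruco::GridBoard::create(2, 2, 0.04f, 0.002f, dictionary);

    std::vector<std::vector<cv::Point2f>> corners, rejected;
    std::vector<int> ids;
    cv::aruco::detectMarkers(gray, dictionary, corners, ids,
                             cv::aruco::DetectorParameters::create(), rejected);

    // Made-up intrinsics just for the sketch; the real ones come from calibration
    cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) <<
                            600.0, 0.0, 320.0,
                            0.0, 600.0, 240.0,
                            0.0, 0.0, 1.0);
    cv::Mat distCoeffs = cv::Mat::zeros(1, 5, CV_64F);

    int markersUsed = 0;
    if (!ids.empty())
    {
        // Try to recover rejected candidates using the known board layout
        cv::aruco::refineDetectedMarkers(gray, board, corners, ids, rejected,
                                         cameraMatrix, distCoeffs);

        // estimatePoseBoard fills a single rvec/tvec for the whole board
        // (one pose, not one per marker) and returns how many markers it used
        cv::Vec3d rvec, tvec;
        markersUsed = cv::aruco::estimatePoseBoard(corners, ids, board,
                                                   cameraMatrix, distCoeffs,
                                                   rvec, tvec);
    }
    return markersUsed > 0 ? 0 : 1;
}
```

In this sketch rvec and tvec are single cv::Vec3d values for the whole board pose, which is how the 3.4 documentation describes estimatePoseBoard, and the call is only made when at least one marker was detected.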