Comments (1)
My code below is a bit disorganized; thank you very much for your patience if you take the time to read it.
the "uv_pv to (U,V,W,1) in Unity world coordinate system" method:
float* VideoFrameProcessor::PixelPointToWorldPoint(float(&uv_pv)[2])
{
// Unprojects a PV-camera pixel (u, v) onto the unit-depth plane and transforms
// it into the Unity world coordinate system.
// Returns a heap-allocated float[4] that the CALLER owns and must delete[]:
//   (U, V, W, 1) on success, (0, 0, 0, 0) when no frame or transform is available.
// **frameMap is a concurrent_unordered_map<int, frame type> which stores the PV camera
// frames, keyed 1 -> 2 -> 3 -> ..., so frameMap.size() is the key of the newest frame.**
if (frameMap.empty())
{
	// No frame received yet — return the all-zero sentinel.
	float* empty = new float[4]{ 0.0f, 0.0f, 0.0f, 0.0f };
	return empty;
}
// 1. Get the current (most recent) PV frame.
auto m_latestFrame = frameMap.at(frameMap.size());
// 2. Unproject the input "uv_pv" to "XY_pv" on the camera-space plane z = 1.
winrt::Windows::Foundation::Point uv_pv_point;
uv_pv_point.X = uv_pv[0];
uv_pv_point.Y = uv_pv[1];
winrt::Windows::Foundation::Point XY_pv =
	m_latestFrame.VideoMediaFrame().CameraIntrinsics().UnprojectAtUnitDepth(uv_pv_point); // Z = 1
// **m_worldCoordSystem is set to the Unity world coordinate system.**
// BUG FIX: TryGetTransformTo returns a nullable reference; it is null whenever the
// two coordinate systems cannot be related right now, and calling Value() on a null
// reference throws. Fail soft with the zero sentinel instead.
auto PVToWorldRef = m_latestFrame.CoordinateSystem().TryGetTransformTo(m_worldCoordSystem);
if (!PVToWorldRef)
{
	float* empty = new float[4]{ 0.0f, 0.0f, 0.0f, 0.0f };
	return empty;
}
auto PVToWorld = PVToWorldRef.Value();
XMMATRIX PVtoWorld_Transform;
PVtoWorld_Transform.r[0] = XMVectorSet(PVToWorld.m11, PVToWorld.m12, PVToWorld.m13, PVToWorld.m14);
PVtoWorld_Transform.r[1] = XMVectorSet(PVToWorld.m21, PVToWorld.m22, PVToWorld.m23, PVToWorld.m24);
PVtoWorld_Transform.r[2] = XMVectorSet(PVToWorld.m31, PVToWorld.m32, PVToWorld.m33, PVToWorld.m34);
PVtoWorld_Transform.r[3] = XMVectorSet(PVToWorld.m41, PVToWorld.m42, PVToWorld.m43, PVToWorld.m44);
// 3. Transform the point into the reference world.
// The camera looks down -z, so the unit plane sits at z = -1 in camera space.
XMVECTOR CameraCoordVector = XMVectorSet(XY_pv.X, XY_pv.Y, -1, 1);
// XMVector4Transform(v, M) computes the row-vector product v * M.
auto UVW_World = XMVector4Transform(CameraCoordVector, PVtoWorld_Transform);
float* PointWorldCoord = new float[4];
PointWorldCoord[0] = XMVectorGetX(UVW_World);
PointWorldCoord[1] = XMVectorGetY(UVW_World);
PointWorldCoord[2] = XMVectorGetZ(UVW_World);
PointWorldCoord[3] = XMVectorGetW(UVW_World);
return PointWorldCoord;
}
and the method that takes (U,V,W,1) as input and computes the final real position XYZ is:
input: pHL2ResearchMode, UVWOne_pv[4]; output: XYZ[3]
void HL2ResearchMode::WorldPointTransform_PVtoDepth(HL2ResearchMode* pHL2ResearchMode, float(&UVWOne_pv)[4] , float(&XYZ)[3])
{
// Projects a homogeneous world-space point (U, V, W, 1) into the long-throw depth
// camera, samples the depth there, and back-projects it to a metric 3D point XYZ
// in the (Unity) world frame. Writes (0, 0, 0) into XYZ on any failure.
// NOTE(review): timestamp, resolution, pDepth and pSigma come from the elided
// "1. GET BUFFER" step and are assumed valid here — confirm against the full source.
// ----- 1. GET BUFFER -----
// ----- 2. GET (U,V) IN DEPTH IMAGE COORDINATES -----
// Locate the rig at the frame timestamp to build the CameraToWorld transform.
Windows::Perception::Spatial::SpatialLocation transToWorld = nullptr;
auto ts = PerceptionTimestampHelper::FromSystemRelativeTargetTime(HundredsOfNanoseconds(checkAndConvertUnsigned(timestamp.HostTicks)));
transToWorld = pHL2ResearchMode->m_locator.TryLocateAtTimestamp(ts, pHL2ResearchMode->m_refFrame);
// **m_refFrame is set to the Unity world coordinate system.**
// BUG FIX: TryLocateAtTimestamp can fail and return null — bail out instead of
// handing a null location to SpatialLocationToDxMatrix.
if (transToWorld == nullptr)
{
	XYZ[0] = 0; XYZ[1] = 0; XYZ[2] = 0;
	return;
}
XMMATRIX depthToWorld = pHL2ResearchMode->m_longDepthCameraPoseInvMatrix * SpatialLocationToDxMatrix(transToWorld); // inv(RigToCamera) * RigToWorld = CameraToWorld
// Project the world point into depth-camera space.
XMMATRIX WorldToDepthCamera = XMMatrixInverse(nullptr, depthToWorld);
XMVECTOR UVW_World_vtr = XMVectorSet(UVWOne_pv[0], UVWOne_pv[1], UVWOne_pv[2], UVWOne_pv[3]);
auto XYZ_depth = XMVector4Transform(UVW_World_vtr, WorldToDepthCamera);
float XYZ_depth_value[4] = { XMVectorGetX(XYZ_depth), XMVectorGetY(XYZ_depth), XMVectorGetZ(XYZ_depth), XMVectorGetW(XYZ_depth) };
// Normalize onto the camera unit plane z = 1.
float xy_depth_norm[2] = { XYZ_depth_value[0] / XYZ_depth_value[2], XYZ_depth_value[1] / XYZ_depth_value[2] };
// Validity check: values outside [-1, 1] fall outside the depth camera's usable
// field of view (and correspond to invalid depth-buffer samples).
if ((xy_depth_norm[0] > 1 || xy_depth_norm[0] < -1) || (xy_depth_norm[1] > 1 || xy_depth_norm[1] < -1))
{
	XYZ[0] = 0; XYZ[1] = 0; XYZ[2] = 0;
	return;
}
float uv_depth[2];
pHL2ResearchMode->m_pLongDepthCameraSensor->MapCameraSpaceToImagePoint(xy_depth_norm, uv_depth);
// Round the uv coordinates to integer values (kept as float for the sensor API).
uv_depth[0] = round(uv_depth[0]);
uv_depth[1] = round(uv_depth[1]);
// BUG FIX: guard against sampling outside the depth image — rounding can push the
// point past the last row/column, which would index pDepth/pSigma out of bounds.
if (uv_depth[0] < 0 || uv_depth[0] >= resolution.Width ||
	uv_depth[1] < 0 || uv_depth[1] >= resolution.Height)
{
	XYZ[0] = 0; XYZ[1] = 0; XYZ[2] = 0;
	return;
}
// ----- 3. GET THE DEPTH AND THE COORDINATES IN WORLD SPACE -----
auto idx = int(uv_depth[0]) + int(uv_depth[1]) * resolution.Width;
UINT16 depth = pDepth[idx]; // **pDepth is the depth buffer**
// The high bit of the sigma buffer flags an invalid depth sample.
// BUG FIX: guard the subtraction so it cannot wrap below zero (depth is unsigned).
if (pSigma[idx] & 0x80)
{
	depth = 0;
}
else
{
	depth = (depth > pHL2ResearchMode->m_depthOffset)
		? static_cast<UINT16>(depth - pHL2ResearchMode->m_depthOffset)
		: 0;
}
// Back-project the pixel through the camera unit plane and scale by the depth.
float xy[2] = { 0, 0 };
pHL2ResearchMode->m_pLongDepthCameraSensor->MapImagePointToCameraUnitPlane(uv_depth, xy);
auto pointOnUnitPlane = XMFLOAT3(xy[0], xy[1], 1);
// BUG FIX: "depth / 1000" was integer division (UINT16 / int), so every depth under
// 1 m collapsed to 0 and larger depths truncated to whole metres. Convert to float
// first (millimetres -> metres).
auto tempPoint = (static_cast<float>(depth) / 1000.0f) * XMVector3Normalize(XMLoadFloat3(&pointOnUnitPlane));
// Transform the metric camera-space point into the world; z is negated to match
// the Unity-side handedness convention used elsewhere in this file.
auto pointInWorld = XMVector3Transform(tempPoint, depthToWorld);
XYZ[0] = XMVectorGetX(pointInWorld);
XYZ[1] = XMVectorGetY(pointInWorld);
XYZ[2] = -XMVectorGetZ(pointInWorld);
// ----- 4. Release and Close -----
}
from hololens2forcv.
Related Issues (20)
- Streaming Sensor Data HOT 1
- What is the RGB autofocus camera sensor's resolution?
- Getting pixels per mm of Depth Camera
- variable sampling frequency of rgb and depth cameras in StreamRecorder application
- CameraWithCVAndCalibration runtime error: Unable to activate Microsoft Store app HOT 3
- Can I get spatial mapping directly from the Research Mode?
- PV camera frame's origin HOT 1
- Interpreting depth camera data
- Samples cannot install after the system is updated HOT 1
- Explanation about vectors in StreamRecorder
- sample code StreamRecorder can compile on unity?
- Low resolution on PV stream HOT 1
- Long Throw mode resolution
- Undistort frames from LF camera
- Any ways to extract the TSDF volume?
- HOLOLENS 2 RESEARCH MODE COORDINATE SYSTEM AND ROTOTRANSLATIONS BETWEEN SENSORS
- Low-signal depth mode in the Windows 11 update
- problem with git lfs
- Internal Error when Deploying Sensor Visualization(twinapi.core.dll)
Recommend Projects
-
React
A declarative, efficient, and flexible JavaScript library for building user interfaces.
-
Vue.js
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
-
Typescript
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
-
TensorFlow
An Open Source Machine Learning Framework for Everyone
-
Django
The Web framework for perfectionists with deadlines.
-
Laravel
A PHP framework for web artisans
-
D3
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
-
Recommend Topics
-
javascript
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
-
web
Some thing interesting about web. New door for the world.
-
server
A server is a program made to process requests and deliver data to clients.
-
Machine learning
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
-
Visualization
Some thing interesting about visualization, use data art
-
Game
Some thing interesting about game, make everyone happy.
Recommend Org
-
Facebook
We are working to build community through open source technology. NB: members must have two-factor auth.
-
Microsoft
Open source projects and samples from Microsoft.
-
Google
Google ❤️ Open Source for everyone.
-
Alibaba
Alibaba Open Source for everyone
-
D3
Data-Driven Documents codes.
-
Tencent
China tencent open source team.
from hololens2forcv.