2016-09-08 7 views
0

C++ Kinect v2 및 freenect2: 깊이 데이터를 실제 좌표로 변환하는 방법

Kinect v2 카메라(Linux)를 사용하여 실제 xyz 좌표를 계산하려고 하지만 계산 결과가 잘못됩니다. 문제는 깊이 카메라 내부 파라미터(fx, fy, cx, cy) 값 때문이라고 생각합니다.

// Back-project a depth pixel (x, y) with depth value pointDepth into a 3D
// camera-space point using the pinhole model and the depth-camera intrinsics
// (depthfx/depthfy = focal lengths, depthcx/depthcy = principal point).
// NOTE(review): units of the result follow pointDepth — presumably millimeters
// for freenect2; confirm against the caller.
cv::Point3f xyzWorld = {0.0f};

xyzWorld.z = pointDepth;
// X = (u - cx) * Z / fx,  Y = (v - cy) * Z / fy
xyzWorld.x = ((float)x - depthcx) * xyzWorld.z / depthfx;
xyzWorld.y = ((float)y - depthcy) * xyzWorld.z / depthfy;
// Fixed: the original assigned xyzWorld.z = pointDepth a second time here,
// which was redundant; the duplicate assignment has been removed.

return xyzWorld;

사용한 코드는 다음과 같습니다:

나를 도와 줄 사람이 있습니까?

나는 freenect2를 사용하고 있습니다.

+0

의견에 유용한 정보를 제공하는 대신 질문을 편집하십시오. – Garf365

답변

1

OpenNI 구현을 그대로 사용하는 것은 어떨까요?

/// Converts a depth-image coordinate (pixel u/v plus depth value) into a
/// world-space point. Only valid on depth streams; writes the result through
/// the three output pointers and returns ONI_STATUS_OK on success.
OniStatus VideoStream::convertDepthToWorldCoordinates(float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
{
    // World conversion is only defined for depth sensors.
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        m_errorLogger.Append("convertDepthToWorldCoordinates: Stream is not from DEPTH\n");
        return ONI_STATUS_NOT_SUPPORTED;
    }

    // Map pixel coordinates into [-0.5, 0.5]; Y is flipped so +Y points up.
    float nx = depthX / m_worldConvertCache.resolutionX - .5f;
    float ny = .5f - depthY / m_worldConvertCache.resolutionY;

    // The stream may report depth in 100um units — normalize to millimeters.
    OniVideoMode videoMode;
    int size = sizeof(videoMode);
    getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &videoMode, &size);
    float toMillimeters = 1.f;
    if (videoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_100_UM)
    {
        toMillimeters = 10.f;
    }

    // Scale by depth and the frustum extents at unit depth (xz/yz factors).
    *pWorldX = (nx * depthZ * m_worldConvertCache.xzFactor) / toMillimeters;
    *pWorldY = (ny * depthZ * m_worldConvertCache.yzFactor) / toMillimeters;
    *pWorldZ = depthZ / toMillimeters;

    return ONI_STATUS_OK;
}

/// Projects a world-space point back onto the depth image plane.
/// Inverse of convertDepthToWorldCoordinates; only valid on depth streams.
OniStatus VideoStream::convertWorldToDepthCoordinates(float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ)
{
    // Projection into the depth image is only defined for depth sensors.
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        m_errorLogger.Append("convertWorldToDepthCoordinates: Stream is not from DEPTH\n");
        return ONI_STATUS_NOT_SUPPORTED;
    }

    // u = halfResX + coeffX * X / Z; v = halfResY - coeffY * Y / Z
    // (Y is flipped because +Y points up in world space, down in image space).
    *pDepthX = m_worldConvertCache.halfResX + m_worldConvertCache.coeffX * worldX / worldZ;
    *pDepthY = m_worldConvertCache.halfResY - m_worldConvertCache.coeffY * worldY / worldZ;
    *pDepthZ = worldZ;

    return ONI_STATUS_OK;
}

월드 변환 캐시는 다음과 같이 갱신됩니다:

/// Recomputes the cached values used by the depth<->world conversions from the
/// current video mode and the stream's field-of-view properties.
void VideoStream::refreshWorldConversionCache()
{
    // Only depth streams carry the geometry needed for world conversion.
    if (m_pSensorInfo->sensorType != ONI_SENSOR_DEPTH)
    {
        return;
    }

    // The current video mode supplies the image resolution.
    OniVideoMode videoMode;
    int size = sizeof(videoMode);
    getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &videoMode, &size);

    // Per-axis field of view of the depth camera (radians — tan() is applied
    // directly below).
    float horizontalFov;
    float verticalFov;
    size = sizeof(float);
    getProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &horizontalFov, &size);
    getProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV, &verticalFov, &size);

    // Width and height of the view frustum at unit depth.
    m_worldConvertCache.xzFactor = 2 * tan(horizontalFov / 2);
    m_worldConvertCache.yzFactor = 2 * tan(verticalFov / 2);

    m_worldConvertCache.resolutionX = videoMode.resolutionX;
    m_worldConvertCache.resolutionY = videoMode.resolutionY;
    m_worldConvertCache.halfResX = m_worldConvertCache.resolutionX / 2;
    m_worldConvertCache.halfResY = m_worldConvertCache.resolutionY / 2;

    // Focal-length-like coefficients: pixels per unit of tangent.
    m_worldConvertCache.coeffX = m_worldConvertCache.resolutionX / m_worldConvertCache.xzFactor;
    m_worldConvertCache.coeffY = m_worldConvertCache.resolutionY / m_worldConvertCache.yzFactor;
}

// Precomputed values for depth<->world coordinate conversion, refreshed by
// refreshWorldConversionCache() whenever the video mode / FOV changes.
struct WorldConversionCache
    {
     float xzFactor;   // frustum width at unit depth: 2 * tan(hFov / 2)
     float yzFactor;   // frustum height at unit depth: 2 * tan(vFov / 2)
     float coeffX;     // resolutionX / xzFactor (focal-length-like, pixels)
     float coeffY;     // resolutionY / yzFactor (focal-length-like, pixels)
     int resolutionX;  // depth image width in pixels
     int resolutionY;  // depth image height in pixels
     int halfResX;     // resolutionX / 2 (principal point approximation)
     int halfResY;     // resolutionY / 2 (principal point approximation)
    } m_worldConvertCache;

위 코드는 모두 OpenNI GitHub 저장소(OpenNI GitHub repository)에서 가져온 것입니다.

수평 및 수직 FOV는 각 프레임의 설명(description)에서 직접 얻을 수 있습니다.