diff --git a/Modules/ToFHardware/KinectV2/mitkKinectV2Controller.cpp b/Modules/ToFHardware/KinectV2/mitkKinectV2Controller.cpp index e2239798bb..bff2ec2082 100644 --- a/Modules/ToFHardware/KinectV2/mitkKinectV2Controller.cpp +++ b/Modules/ToFHardware/KinectV2/mitkKinectV2Controller.cpp @@ -1,750 +1,760 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #include "mitkKinectV2Controller.h" //Kinect V2 SDK #include //VTK #include #include #include #include #include #include #include #include //Taken from official Microsoft SDK samples. Should never be public or part of the class, //because it is just for cleaning up. // Safe release for interfaces template inline void SafeRelease(Interface *& pInterfaceToRelease) { if (pInterfaceToRelease != NULL) { pInterfaceToRelease->Release(); pInterfaceToRelease = NULL; } } namespace mitk { class KinectV2Controller::KinectV2ControllerPrivate { public: KinectV2ControllerPrivate(); ~KinectV2ControllerPrivate(); //bool ErrorText(unsigned int error); IKinectSensor* m_pKinectSensor;/// m_PolyData;///< Conversion of m_CameraCoordinates to vtkPolyData double m_TriangulationThreshold; ///< Threshold to cut off vertices from triangulation bool m_GenerateTriangularMesh; ///< Apply triangulation or not }; KinectV2Controller::KinectV2ControllerPrivate::KinectV2ControllerPrivate(): m_pKinectSensor(NULL), m_pMultiSourceFrameReader(NULL), m_pCoordinateMapper(NULL), m_pColorRGBX(NULL), m_ConnectionCheck(false), m_DepthCaptureWidth(512), m_DepthCaptureHeight(424), m_RGBCaptureWidth(1920), m_RGBCaptureHeight(1080), m_RGBBufferSize(1920*1080*3), m_DepthBufferSize(sizeof(float)*512*424), m_CameraCoordinates(NULL), m_ColorPoints(NULL), m_PolyData(NULL), m_TriangulationThreshold(0.0), m_GenerateTriangularMesh(false) { // create heap storage for color pixel data in RGBX format m_pColorRGBX = new RGBQUAD[m_RGBCaptureWidth * m_RGBCaptureHeight]; //initialize 3D world coordinates and texture coordinates m_CameraCoordinates = new CameraSpacePoint[m_DepthCaptureWidth * m_DepthCaptureHeight]; m_ColorPoints = new ColorSpacePoint[m_DepthCaptureWidth * m_DepthCaptureHeight]; m_PolyData = vtkSmartPointer::New(); } KinectV2Controller::KinectV2ControllerPrivate::~KinectV2ControllerPrivate() { MITK_INFO << "~KinectV2ControllerPrivate"; } KinectV2Controller::KinectV2Controller(): d(new KinectV2ControllerPrivate) { } KinectV2Controller::~KinectV2Controller() { MITK_INFO << "~KinectV2Controller"; delete d; } bool KinectV2Controller::OpenCameraConnection() { if (!d->m_ConnectionCheck) { HRESULT hr; d->m_ConnectionCheck = true; hr = GetDefaultKinectSensor(&d->m_pKinectSensor); if (FAILED(hr)) { d->m_ConnectionCheck = false; } else { hr = d->m_pKinectSensor->get_CoordinateMapper(&d->m_pCoordinateMapper); if (FAILED(hr)) { d->m_ConnectionCheck = false; } hr = d->m_pKinectSensor->Open(); } if (!d->m_pKinectSensor || FAILED(hr)) { d->m_ConnectionCheck = false; MITK_WARN << "No Kinect 2 ready!"; } else { MITK_INFO << "Kinect 2 succesfully connected"; } } return d->m_ConnectionCheck; } bool KinectV2Controller::InitializeMultiFrameReader() { //check if it is 
already initialized if((d->m_ConnectionCheck) && (d->m_pMultiSourceFrameReader)) { return true; } else //initialize the frame reader { HRESULT hr = d->m_pKinectSensor->OpenMultiSourceFrameReader( FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_Infrared, &d->m_pMultiSourceFrameReader); if (SUCCEEDED(hr) && (d->m_pMultiSourceFrameReader)) { MITK_INFO << "KinectV2 MultiFrameReader initialized"; return true; } } return false; } bool KinectV2Controller::CloseCameraConnection() { // done with depth frame reader MITK_INFO << "CloseConnection"; SafeRelease(d->m_pMultiSourceFrameReader); // close the Kinect Sensor if(d->m_pKinectSensor) { d->m_pKinectSensor->Close(); } SafeRelease(d->m_pKinectSensor); d->m_ConnectionCheck = false; return true; } bool KinectV2Controller::UpdateCamera() { //Acquire lastest frame updates the camera and for //unknown reasons I cannot use it here in UpdateCamera() //without resulting in random crashes of the app. return true; } void KinectV2Controller::GetDistances(float* distances) { if(!InitializeMultiFrameReader()) { MITK_ERROR << "Unable to initialize MultiFrameReader"; return; } IMultiSourceFrame* pMultiSourceFrame = NULL; IDepthFrame* pDepthFrame = NULL; HRESULT hr = -1; //SDK error format static DWORD lastTime = 0; DWORD currentTime = GetTickCount(); //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS. if( unsigned int(currentTime - lastTime) > 33 ) { hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame); lastTime = currentTime; } if (SUCCEEDED(hr)) { IDepthFrameReference* pDepthFrameReference = NULL; hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference); if (SUCCEEDED(hr)) { hr = pDepthFrameReference->AcquireFrame(&pDepthFrame); } SafeRelease(pDepthFrameReference); } if (SUCCEEDED(hr)) { UINT nDepthBufferSize = 0; UINT16 *pDepthBuffer = NULL; if (SUCCEEDED(hr)) { hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer); } if (SUCCEEDED(hr)) { UINT pointCount = d->m_DepthCaptureWidth * d->m_DepthCaptureHeight; d->m_pCoordinateMapper->MapDepthFrameToCameraSpace(pointCount, pDepthBuffer, pointCount, d->m_CameraCoordinates); vtkSmartPointer points = vtkSmartPointer::New(); vtkSmartPointer vertices = vtkSmartPointer::New(); vtkSmartPointer textureCoordinates = vtkSmartPointer::New(); textureCoordinates->SetNumberOfComponents(2); textureCoordinates->Allocate(pointCount); d->m_pCoordinateMapper->MapDepthFrameToColorSpace(pointCount, pDepthBuffer, pointCount, d->m_ColorPoints); for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i) { vtkIdType id = points->InsertNextPoint(d->m_CameraCoordinates[i].X, d->m_CameraCoordinates[i].Y, d->m_CameraCoordinates[i].Z); vertices->InsertNextCell(1); vertices->InsertCellPoint(id); distances[i] = static_cast(*pDepthBuffer); ++pDepthBuffer; ColorSpacePoint colorPoint = d->m_ColorPoints[i]; // retrieve the depth to color mapping for the current depth pixel int colorInDepthX = (int)(floor(colorPoint.X + 0.5)); int colorInDepthY = (int)(floor(colorPoint.Y + 0.5)); float xNorm = static_cast(colorInDepthX)/d->m_RGBCaptureWidth; float yNorm = static_cast(colorInDepthY)/d->m_RGBCaptureHeight; // make sure the depth pixel maps to a valid point in color space if ( colorInDepthX >= 0 && colorInDepthX < d->m_RGBCaptureWidth && colorInDepthY >= 0 && colorInDepthY < d->m_RGBCaptureHeight ) { textureCoordinates->InsertTuple2(id, xNorm, yNorm); } } d->m_PolyData = 
vtkSmartPointer::New(); d->m_PolyData->SetPoints(points); d->m_PolyData->SetVerts(vertices); d->m_PolyData->GetPointData()->SetTCoords(textureCoordinates); } else { MITK_ERROR << "AccessUnderlyingBuffer"; } } SafeRelease(pDepthFrame); SafeRelease(pMultiSourceFrame); if( hr != -1 && !SUCCEEDED(hr) ) { //The thread gets here, if the data is requested faster than the device can deliver it. //This may happen from time to time. MITK_DEBUG << "HR result false in KinectV2Controller::GetDistances()"; return; } } void KinectV2Controller::GetRgb(unsigned char* rgb) { if(!InitializeMultiFrameReader()) { MITK_ERROR << "Unable to initialize MultiFrameReader"; return; } IMultiSourceFrame* pMultiSourceFrame = NULL; IColorFrame* pColorFrame = NULL; HRESULT hr = -1; static DWORD lastTime = 0; DWORD currentTime = GetTickCount(); //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS. if( unsigned int(currentTime - lastTime) > 33 ) { hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame); lastTime = currentTime; } ColorImageFormat imageFormat = ColorImageFormat_None; UINT nColorBufferSize = 0; RGBQUAD *pColorBuffer = NULL; // get color frame data if (SUCCEEDED(hr)) { hr = pColorFrame->get_RawColorImageFormat(&imageFormat); } if (SUCCEEDED(hr)) { if (imageFormat == ColorImageFormat_Bgra) { hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast(&pColorBuffer)); } else if (d->m_pColorRGBX) { pColorBuffer = d->m_pColorRGBX; nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD); hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast(pColorBuffer), ColorImageFormat_Bgra); } else { hr = E_FAIL; } if (SUCCEEDED(hr)) { for(int i = 0; i < d->m_RGBBufferSize; i+=3) { //convert from BGR to RGB rgb[i+0] = pColorBuffer->rgbRed; rgb[i+1] = pColorBuffer->rgbGreen; rgb[i+2] = pColorBuffer->rgbBlue; ++pColorBuffer; } } } SafeRelease(pColorFrame); SafeRelease(pMultiSourceFrame); if( hr != -1 && !SUCCEEDED(hr) ) { //The thread gets here, if the data is requested faster than the device can deliver it. //This may happen from time to time. MITK_DEBUG << "HR result false in KinectV2Controller::GetRgb()"; } } void KinectV2Controller::GetAllData(float* distances, float* amplitudes, unsigned char* rgb) { if(!InitializeMultiFrameReader()) { MITK_ERROR << "Unable to initialize MultiFrameReader"; return; } IMultiSourceFrame* pMultiSourceFrame = NULL; IDepthFrame* pDepthFrame = NULL; IColorFrame* pColorFrame = NULL; IInfraredFrame* pInfraRedFrame = NULL; HRESULT hr = -1; static DWORD lastTime = 0; DWORD currentTime = GetTickCount(); //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS. 
if( unsigned int(currentTime - lastTime) > 33 ) { hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame); lastTime = currentTime; } if (SUCCEEDED(hr)) { IDepthFrameReference* pDepthFrameReference = NULL; hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference); if (SUCCEEDED(hr)) { hr = pDepthFrameReference->AcquireFrame(&pDepthFrame); } SafeRelease(pDepthFrameReference); } if (SUCCEEDED(hr)) { IColorFrameReference* pColorFrameReference = NULL; hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference); if (SUCCEEDED(hr)) { hr = pColorFrameReference->AcquireFrame(&pColorFrame); } SafeRelease(pColorFrameReference); } if (SUCCEEDED(hr)) { IInfraredFrameReference* pInfraredFrameReference = NULL; hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference); if (SUCCEEDED(hr)) { hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame); } SafeRelease(pInfraredFrameReference); } if (SUCCEEDED(hr)) { UINT nDepthBufferSize = 0; UINT16 *pDepthBuffer = NULL; UINT16 *pInfraRedBuffer = NULL; ColorImageFormat imageFormat = ColorImageFormat_None; UINT nColorBufferSize = 0; RGBQUAD *pColorBuffer = NULL; if (SUCCEEDED(hr)) { hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer); } if (SUCCEEDED(hr)) { hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer); } if (SUCCEEDED(hr)) { UINT pointCount = d->m_DepthCaptureWidth * d->m_DepthCaptureHeight; d->m_pCoordinateMapper->MapDepthFrameToCameraSpace(pointCount, pDepthBuffer, pointCount, d->m_CameraCoordinates); vtkSmartPointer points = vtkSmartPointer::New(); vtkSmartPointer vertices = vtkSmartPointer::New(); vtkSmartPointer polys = vtkSmartPointer::New(); const double meterfactor = 1000.0; vtkSmartPointer textureCoordinates = vtkSmartPointer::New(); vtkSmartPointer vertexIdList = vtkSmartPointer::New(); vertexIdList->Allocate(pointCount); vertexIdList->SetNumberOfIds(pointCount); for(unsigned int i = 0; i < pointCount; ++i) { vertexIdList->SetId(i, 0); } std::vector isPointValid; isPointValid.resize(pointCount); //Allocate the object once else it would automatically allocate new memory //for every vertex and perform a copy which is expensive. vertexIdList->Allocate(pointCount); vertexIdList->SetNumberOfIds(pointCount); textureCoordinates->SetNumberOfComponents(2); textureCoordinates->Allocate(pointCount); d->m_pCoordinateMapper->MapDepthFrameToColorSpace(pointCount, pDepthBuffer, pointCount, d->m_ColorPoints); for(int j = 0; j < d->m_DepthCaptureHeight; ++j) { for(int i = 0; i < d->m_DepthCaptureWidth; ++i) { unsigned int pixelID = i+j*d->m_DepthCaptureWidth; distances[pixelID] = static_cast(*pDepthBuffer); amplitudes[pixelID] = static_cast(*pInfraRedBuffer); ++pDepthBuffer; ++pInfraRedBuffer; if (d->m_CameraCoordinates[pixelID].Z<=mitk::eps) { isPointValid[pixelID] = false; } else { isPointValid[pixelID] = true; //VTK would insert empty points into the polydata if we use //points->InsertPoint(pixelID, cartesianCoordinates.GetDataPointer()). //If we use points->InsertNextPoint(...) instead, the ID's do not //correspond to the image pixel ID's. Thus, we have to save them //in the vertexIdList. //Kinect SDK delivers world coordinates in meters, so we have to //convert to mm for MITK. 
- vertexIdList->SetId(pixelID, points->InsertNextPoint(d->m_CameraCoordinates[pixelID].X*meterfactor, d->m_CameraCoordinates[pixelID].Y*meterfactor, d->m_CameraCoordinates[pixelID].Z*meterfactor)); - - ColorSpacePoint colorPoint = d->m_ColorPoints[pixelID]; - // retrieve the depth to color mapping for the current depth pixel - int colorInDepthX = (int)(floor(colorPoint.X + 0.5)); - int colorInDepthY = (int)(floor(colorPoint.Y + 0.5)); - - float xNorm = static_cast<float>(colorInDepthX)/d->m_RGBCaptureWidth; - float yNorm = static_cast<float>(colorInDepthY)/d->m_RGBCaptureHeight; - - // make sure the depth pixel maps to a valid point in color space - if ( colorInDepthX >= 0 && colorInDepthX < d->m_RGBCaptureWidth && colorInDepthY >= 0 && colorInDepthY < d->m_RGBCaptureHeight ) - { - textureCoordinates->InsertTuple2(vertexIdList->GetId(pixelID), xNorm, yNorm); - } + vertexIdList->SetId(pixelID, points->InsertNextPoint(-d->m_CameraCoordinates[pixelID].X*meterfactor, -d->m_CameraCoordinates[pixelID].Y*meterfactor, d->m_CameraCoordinates[pixelID].Z*meterfactor)); if (d->m_GenerateTriangularMesh) { if((i >= 1) && (j >= 1)) { //This little piece of art explains the ID's: // // P(x_1y_1)---P(xy_1) // | | // | | // | | // P(x_1y)-----P(xy) // //We can only start triangulation if we are at vertex (1,1), //because we need the other 3 vertices near this one. //To go one pixel line back in the image array, we have to //subtract 1x xDimension. vtkIdType xy = pixelID; vtkIdType x_1y = pixelID-1; vtkIdType xy_1 = pixelID-d->m_DepthCaptureWidth; vtkIdType x_1y_1 = xy_1-1; //Find the corresponding vertex ID's in the saved vertexIdList: vtkIdType xyV = vertexIdList->GetId(xy); vtkIdType x_1yV = vertexIdList->GetId(x_1y); vtkIdType xy_1V = vertexIdList->GetId(xy_1); vtkIdType x_1y_1V = vertexIdList->GetId(x_1y_1); if (isPointValid[xy]&&isPointValid[x_1y]&&isPointValid[x_1y_1]&&isPointValid[xy_1]) // check if points of cell are valid { double pointXY[3], pointX_1Y[3], pointXY_1[3], pointX_1Y_1[3]; points->GetPoint(xyV, pointXY); points->GetPoint(x_1yV, pointX_1Y); points->GetPoint(xy_1V, pointXY_1); points->GetPoint(x_1y_1V, pointX_1Y_1); if( (mitk::Equal(d->m_TriangulationThreshold, 0.0)) || ((vtkMath::Distance2BetweenPoints(pointXY, pointX_1Y) <= d->m_TriangulationThreshold) && (vtkMath::Distance2BetweenPoints(pointXY, pointXY_1) <= d->m_TriangulationThreshold) && (vtkMath::Distance2BetweenPoints(pointX_1Y, pointX_1Y_1) <= d->m_TriangulationThreshold) && (vtkMath::Distance2BetweenPoints(pointXY_1, pointX_1Y_1) <= d->m_TriangulationThreshold))) { polys->InsertNextCell(3); polys->InsertCellPoint(x_1yV); polys->InsertCellPoint(xyV); polys->InsertCellPoint(x_1y_1V); polys->InsertNextCell(3); polys->InsertCellPoint(x_1y_1V); polys->InsertCellPoint(xyV); polys->InsertCellPoint(xy_1V); } else { //We don't want triangulation, but we want to keep the vertex vertices->InsertNextCell(1); vertices->InsertCellPoint(xyV); } } } } else { //We don't want triangulation, we only want vertices vertices->InsertNextCell(1); vertices->InsertCellPoint(vertexIdList->GetId(pixelID)); } + + ColorSpacePoint colorPoint = d->m_ColorPoints[pixelID]; + // retrieve the depth to color mapping for the current depth pixel + int colorInDepthX = (int)(floor(colorPoint.X + 0.5)); + int colorInDepthY = (int)(floor(colorPoint.Y + 0.5)); + + float xNorm = -static_cast<float>(colorInDepthX)/d->m_RGBCaptureWidth; + float yNorm = static_cast<float>(colorInDepthY)/d->m_RGBCaptureHeight; + + // make sure the depth pixel maps to a valid point in color space + if ( colorInDepthX >= 0
&& colorInDepthX < d->m_RGBCaptureWidth && colorInDepthY >= 0 && colorInDepthY < d->m_RGBCaptureHeight ) + { + textureCoordinates->InsertTuple2(vertexIdList->GetId(pixelID), xNorm, yNorm); + } + else + { + textureCoordinates->InsertTuple2(vertexIdList->GetId(pixelID), 0, 0); + } } } } d->m_PolyData = vtkSmartPointer<vtkPolyData>::New(); d->m_PolyData->SetPoints(points); d->m_PolyData->SetVerts(vertices); d->m_PolyData->SetPolys(polys); d->m_PolyData->GetPointData()->SetTCoords(textureCoordinates); } else { MITK_ERROR << "AccessUnderlyingBuffer"; } // get color frame data if (SUCCEEDED(hr)) { hr = pColorFrame->get_RawColorImageFormat(&imageFormat); } if (SUCCEEDED(hr)) { if (imageFormat == ColorImageFormat_Bgra) { hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer)); } else if (d->m_pColorRGBX) { pColorBuffer = d->m_pColorRGBX; nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD); hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra); } else { hr = E_FAIL; } if (SUCCEEDED(hr)) { - for(int i = 0; i < d->m_RGBBufferSize; i+=3) + for(int j = 0; j < d->m_RGBCaptureHeight; ++j) { - //convert from BGR to RGB - rgb[i+0] = pColorBuffer->rgbRed; - rgb[i+1] = pColorBuffer->rgbGreen; - rgb[i+2] = pColorBuffer->rgbBlue; - ++pColorBuffer; + for(int i = 0; i < d->m_RGBCaptureWidth; ++i) + { + //the buffer has the size of 3*ResolutionX*ResolutionY (one entry for each color value), + //that's why the id is multiplied by 3. + unsigned int id = ((d->m_RGBCaptureWidth - i - 1) + j*d->m_RGBCaptureWidth)*3; + //convert from BGR to RGB + rgb[id+0] = pColorBuffer->rgbRed; + rgb[id+1] = pColorBuffer->rgbGreen; + rgb[id+2] = pColorBuffer->rgbBlue; + ++pColorBuffer; + } } } } } SafeRelease(pDepthFrame); SafeRelease(pColorFrame); SafeRelease(pInfraRedFrame); SafeRelease(pMultiSourceFrame); if( hr != -1 && !SUCCEEDED(hr) ) { //The thread gets here, if the data is requested faster than the device can deliver it. //This may happen from time to time. MITK_DEBUG << "HR result false in KinectV2Controller::GetAllData()"; } } void KinectV2Controller::GetAmplitudes( float* amplitudes ) { if(!InitializeMultiFrameReader()) { MITK_ERROR << "Unable to initialize MultiFrameReader"; return; } IMultiSourceFrame* pMultiSourceFrame = NULL; IInfraredFrame* pInfraRedFrame = NULL; HRESULT hr = -1; static DWORD lastTime = 0; DWORD currentTime = GetTickCount(); //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS.
if( unsigned int(currentTime - lastTime) > 33 ) { hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame); lastTime = currentTime; } if (SUCCEEDED(hr)) { IInfraredFrameReference* pInfraredFrameReference = NULL; hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference); if (SUCCEEDED(hr)) { hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame); } SafeRelease(pInfraredFrameReference); } if (SUCCEEDED(hr)) { UINT nDepthBufferSize = 0; UINT16 *pInfraRedBuffer = NULL; if (SUCCEEDED(hr)) { hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer); } if (SUCCEEDED(hr)) { for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i) { amplitudes[i] = static_cast<float>(*pInfraRedBuffer); ++pInfraRedBuffer; } } else { MITK_ERROR << "AccessUnderlyingBuffer"; } } SafeRelease(pInfraRedFrame); SafeRelease(pMultiSourceFrame); if( hr != -1 && !SUCCEEDED(hr) ) { //The thread gets here, if the data is requested faster than the device can deliver it. //This may happen from time to time. MITK_DEBUG << "HR result false in KinectV2Controller::GetAmplitudes()"; } } int KinectV2Controller::GetRGBCaptureWidth() const { return d->m_RGBCaptureWidth; } int KinectV2Controller::GetRGBCaptureHeight() const { return d->m_RGBCaptureHeight; } int KinectV2Controller::GetDepthCaptureWidth() const { return d->m_DepthCaptureWidth; } int KinectV2Controller::GetDepthCaptureHeight() const { return d->m_DepthCaptureHeight; } vtkSmartPointer<vtkPolyData> KinectV2Controller::GetVtkPolyData() { return d->m_PolyData; } void KinectV2Controller::SetGenerateTriangularMesh(bool flag) { d->m_GenerateTriangularMesh = flag; } void KinectV2Controller::SetTriangulationThreshold(double triangulationThreshold) { this->d->m_TriangulationThreshold = triangulationThreshold * triangulationThreshold; } } diff --git a/Plugins/org.mitk.gui.qt.tofutil/src/internal/QmitkToFUtilView.cpp b/Plugins/org.mitk.gui.qt.tofutil/src/internal/QmitkToFUtilView.cpp index 9036e2a20c..3599c19a86 100644 --- a/Plugins/org.mitk.gui.qt.tofutil/src/internal/QmitkToFUtilView.cpp +++ b/Plugins/org.mitk.gui.qt.tofutil/src/internal/QmitkToFUtilView.cpp @@ -1,668 +1,668 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/ // Blueberry #include #include #include // Qmitk #include "QmitkToFUtilView.h" #include #include // Qt #include #include #include #include #include // MITK #include #include #include #include #include #include #include #include #include #include #include // VTK #include #include #include // ITK #include #include const std::string QmitkToFUtilView::VIEW_ID = "org.mitk.views.tofutil"; //Constructor QmitkToFUtilView::QmitkToFUtilView() : QmitkAbstractView() , m_Controls(NULL), m_MultiWidget( NULL ) , m_MitkDistanceImage(NULL), m_MitkAmplitudeImage(NULL), m_MitkIntensityImage(NULL), m_Surface(NULL) , m_DistanceImageNode(NULL), m_AmplitudeImageNode(NULL), m_IntensityImageNode(NULL), m_RGBImageNode(NULL), m_SurfaceNode(NULL) , m_ToFImageRecorder(NULL), m_ToFImageGrabber(NULL), m_ToFDistanceImageToSurfaceFilter(NULL), m_ToFCompositeFilter(NULL) , m_2DDisplayCount(0) , m_RealTimeClock(NULL) , m_StepsForFramerate(100) , m_2DTimeBefore(0.0) , m_2DTimeAfter(0.0) , m_CameraIntrinsics(NULL) { this->m_Frametimer = new QTimer(this); this->m_ToFDistanceImageToSurfaceFilter = mitk::ToFDistanceImageToSurfaceFilter::New(); this->m_ToFCompositeFilter = mitk::ToFCompositeFilter::New(); this->m_ToFImageRecorder = mitk::ToFImageRecorder::New(); } //Destructor, specifically calling OnToFCameraStopped() and OnToFCammeraDiconnected() QmitkToFUtilView::~QmitkToFUtilView() { OnToFCameraStopped(); OnToFCameraDisconnected(); } //Createing the PartControl Signal-Slot principal void QmitkToFUtilView::CreateQtPartControl( QWidget *parent ) { // build up qt view, unless already done if ( !m_Controls ) { // create GUI widgets from the Qt Designer's .ui file m_Controls = new Ui::QmitkToFUtilViewControls; m_Controls->setupUi( parent ); //Looking for Input and Defining reaction connect(m_Frametimer, SIGNAL(timeout()), this, SLOT(OnUpdateCamera())); connect( (QObject*)(m_Controls->m_ToFConnectionWidget), SIGNAL(KinectAcquisitionModeChanged()), this, SLOT(OnKinectAcquisitionModeChanged()) ); // Todo in Widget2 connect( (QObject*)(m_Controls->m_ToFConnectionWidget), SIGNAL(ToFCameraConnected()), this, SLOT(OnToFCameraConnected()) ); connect( (QObject*)(m_Controls->m_ToFConnectionWidget), SIGNAL(ToFCameraDisconnected()), this, SLOT(OnToFCameraDisconnected()) ); connect( (QObject*)(m_Controls->m_ToFConnectionWidget), SIGNAL(ToFCameraSelected(const QString)), this, SLOT(OnToFCameraSelected(const QString)) ); connect( (QObject*)(m_Controls->m_ToFRecorderWidget), SIGNAL(ToFCameraStarted()), this, SLOT(OnToFCameraStarted()) ); connect( (QObject*)(m_Controls->m_ToFRecorderWidget), SIGNAL(ToFCameraStopped()), this, SLOT(OnToFCameraStopped()) ); connect( (QObject*)(m_Controls->m_ToFRecorderWidget), SIGNAL(RecordingStarted()), this, SLOT(OnToFCameraStopped()) ); connect( (QObject*)(m_Controls->m_ToFRecorderWidget), SIGNAL(RecordingStopped()), this, SLOT(OnToFCameraStarted()) ); connect( (QObject*)(m_Controls->m_SurfaceCheckBox), SIGNAL(toggled(bool)), this, SLOT(OnSurfaceCheckboxChecked(bool)) ); connect( (QObject*)(m_Controls->m_TextureCheckBox), SIGNAL(toggled(bool)), this, SLOT(OnTextureCheckBoxChecked(bool)) ); connect( (QObject*)(m_Controls->m_KinectTextureCheckBox), SIGNAL(toggled(bool)), this, SLOT(OnKinectRGBTextureCheckBoxChecked(bool)) ); connect( (QObject*)(m_Controls->m_GenerateTriangularMeshCheckBox), SIGNAL(toggled(bool)), this, SLOT(OnTriangulationCheckBoxChanged()) ); connect( (QObject*)(m_Controls->m_TriangulationThreshold), SIGNAL(valueChanged(double)), 
this, SLOT(OnTriangulationThresholdSpinBoxChanged()) ); } } //SetFocus-Method -> actually seting Focus to the Recorder void QmitkToFUtilView::SetFocus() { m_Controls->m_ToFRecorderWidget->setFocus(); } //Activated-Method->Generating RenderWindow void QmitkToFUtilView::Activated() { //get the current RenderWindowPart or open a new one if there is none if(this->GetRenderWindowPart(OPEN)) { mitk::ILinkedRenderWindowPart* linkedRenderWindowPart = dynamic_cast(this->GetRenderWindowPart()); if(linkedRenderWindowPart == 0) { MITK_ERROR << "No linked StdMultiWidget avaiable!!!"; } else { linkedRenderWindowPart->EnableSlicingPlanes(false); } GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Axial); GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetSliceNavigationController()->SliceLockedOn(); GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Axial); GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetSliceNavigationController()->SliceLockedOn(); GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Axial); GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetSliceNavigationController()->SliceLockedOn(); this->GetRenderWindowPart()->GetRenderingManager()->InitializeViews(); this->UseToFVisibilitySettings(true); if (this->m_ToFCompositeFilter) { m_Controls->m_ToFCompositeFilterWidget->SetToFCompositeFilter(this->m_ToFCompositeFilter); } if (this->GetDataStorage()) { m_Controls->m_ToFCompositeFilterWidget->SetDataStorage(this->GetDataStorage()); } if (this->m_ToFImageGrabber.IsNull()) { m_Controls->m_ToFRecorderWidget->setEnabled(false); m_Controls->m_ToFVisualisationSettingsWidget->setEnabled(false); m_Controls->m_ToFCompositeFilterWidget->setEnabled(false); m_Controls->tofMeasurementWidget->setEnabled(false); m_Controls->SurfacePropertiesBox->setEnabled(false); } } } //ZomnnieView-Method -> Resetting GUI to default. Why not just QmitkToFUtilView()?! 
void QmitkToFUtilView::ActivatedZombieView(berry::IWorkbenchPartReference::Pointer /*zombieView*/) { ResetGUIToDefault(); } void QmitkToFUtilView::Deactivated() { } void QmitkToFUtilView::Visible() { } //Reset of the ToFUtilView void QmitkToFUtilView::Hidden() { ResetGUIToDefault(); } void QmitkToFUtilView::OnToFCameraConnected() { MITK_DEBUG <<"OnToFCameraConnected"; this->m_2DDisplayCount = 0; this->m_ToFImageGrabber = m_Controls->m_ToFConnectionWidget->GetToFImageGrabber(); // initialize surface generation this->m_ToFDistanceImageToSurfaceFilter = mitk::ToFDistanceImageToSurfaceFilter::New(); // initialize ToFImageRecorder and ToFRecorderWidget this->m_ToFImageRecorder = mitk::ToFImageRecorder::New(); this->m_ToFImageRecorder->SetCameraDevice(this->m_ToFImageGrabber->GetCameraDevice()); m_Controls->m_ToFRecorderWidget->SetParameter(this->m_ToFImageGrabber, this->m_ToFImageRecorder); m_Controls->m_ToFRecorderWidget->setEnabled(true); m_Controls->m_ToFRecorderWidget->ResetGUIToInitial(); m_Controls->m_ToFVisualisationSettingsWidget->setEnabled(false); // initialize ToFCompositeFilterWidget this->m_ToFCompositeFilter = mitk::ToFCompositeFilter::New(); if (this->m_ToFCompositeFilter) { m_Controls->m_ToFCompositeFilterWidget->SetToFCompositeFilter(this->m_ToFCompositeFilter); } if (this->GetDataStorage()) { m_Controls->m_ToFCompositeFilterWidget->SetDataStorage(this->GetDataStorage()); } // initialize measurement widget m_Controls->tofMeasurementWidget->InitializeWidget(this->GetRenderWindowPart()->GetQmitkRenderWindows(),this->GetDataStorage(), this->m_ToFDistanceImageToSurfaceFilter->GetCameraIntrinsics()); this->m_RealTimeClock = mitk::RealTimeClock::New(); this->m_2DTimeBefore = this->m_RealTimeClock->GetCurrentStamp(); this->RequestRenderWindowUpdate(); } void QmitkToFUtilView::ResetGUIToDefault() { if(this->GetRenderWindowPart()) { mitk::ILinkedRenderWindowPart* linkedRenderWindowPart = dynamic_cast(this->GetRenderWindowPart()); if(linkedRenderWindowPart == 0) { MITK_ERROR << "No linked StdMultiWidget avaiable!!!"; } else { linkedRenderWindowPart->EnableSlicingPlanes(true); } GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Axial); GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetSliceNavigationController()->SliceLockedOff(); GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Sagittal); GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetSliceNavigationController()->SliceLockedOff(); GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetSliceNavigationController()->SetDefaultViewDirection(mitk::SliceNavigationController::Frontal); GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetSliceNavigationController()->SliceLockedOff(); this->UseToFVisibilitySettings(false); //global reinit this->GetRenderWindowPart()->GetRenderingManager()->InitializeViews(); this->RequestRenderWindowUpdate(); } } void QmitkToFUtilView::OnToFCameraDisconnected() { m_Controls->m_ToFRecorderWidget->OnStop(); m_Controls->m_ToFRecorderWidget->setEnabled(false); m_Controls->m_ToFVisualisationSettingsWidget->setEnabled(false); m_Controls->tofMeasurementWidget->setEnabled(false); m_Controls->SurfacePropertiesBox->setEnabled(false); //clean up measurement widget m_Controls->tofMeasurementWidget->CleanUpWidget(); } void QmitkToFUtilView::OnKinectAcquisitionModeChanged() { if 
(m_ToFCompositeFilter.IsNotNull()&&m_ToFImageGrabber.IsNotNull()) { if (m_SelectedCamera.contains("Kinect")) { if (m_ToFImageGrabber->GetBoolProperty("RGB")) { this->m_RGBImageNode = ReplaceNodeData("RGB image",this->m_ToFImageGrabber->GetOutput(3)); this->m_ToFDistanceImageToSurfaceFilter->SetInput(3,this->m_ToFImageGrabber->GetOutput(3)); } else if (m_ToFImageGrabber->GetBoolProperty("IR")) { this->m_MitkAmplitudeImage = m_ToFCompositeFilter->GetOutput(1); this->m_AmplitudeImageNode = ReplaceNodeData("Amplitude image",m_MitkAmplitudeImage); } } this->UseToFVisibilitySettings(true); } } void QmitkToFUtilView::OnToFCameraStarted() { if (m_ToFImageGrabber.IsNotNull()) { // initialize camera intrinsics if (this->m_ToFImageGrabber->GetProperty("CameraIntrinsics")) { m_CameraIntrinsics = dynamic_cast(this->m_ToFImageGrabber->GetProperty("CameraIntrinsics"))->GetValue(); MITK_INFO << m_CameraIntrinsics->ToString(); } else { m_CameraIntrinsics = NULL; MITK_ERROR << "No camera intrinsics were found!"; } // set camera intrinsics if ( m_CameraIntrinsics.IsNotNull() ) { this->m_ToFDistanceImageToSurfaceFilter->SetCameraIntrinsics(m_CameraIntrinsics); } // initial update of image grabber this->m_ToFImageGrabber->Update(); this->m_ToFCompositeFilter->SetInput(0,this->m_ToFImageGrabber->GetOutput(0)); this->m_ToFCompositeFilter->SetInput(1,this->m_ToFImageGrabber->GetOutput(1)); this->m_ToFCompositeFilter->SetInput(2,this->m_ToFImageGrabber->GetOutput(2)); // initial update of composite filter this->m_ToFCompositeFilter->Update(); this->m_MitkDistanceImage = m_ToFCompositeFilter->GetOutput(); this->m_DistanceImageNode = ReplaceNodeData("Distance image",m_MitkDistanceImage); std::string rgbFileName; m_ToFImageGrabber->GetCameraDevice()->GetStringProperty("RGBImageFileName",rgbFileName); bool hasRGBImage = false; m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("HasRGBImage",hasRGBImage); bool hasIntensityImage = false; m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("HasIntensityImage",hasIntensityImage); bool hasAmplitudeImage = false; m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("HasAmplitudeImage",hasAmplitudeImage); bool KinectReconstructionMode = false; m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("KinectReconstructionMode",KinectReconstructionMode); if(KinectReconstructionMode) { //set the reconstruction mode for kinect this->m_ToFDistanceImageToSurfaceFilter->SetReconstructionMode(mitk::ToFDistanceImageToSurfaceFilter::Kinect); } if (m_CameraIntrinsics.IsNotNull()) { m_ToFDistanceImageToSurfaceFilter->SetCameraIntrinsics(m_CameraIntrinsics); } if(hasRGBImage || (rgbFileName!="")) { if(m_ToFImageGrabber->GetBoolProperty("IR")) { this->m_MitkAmplitudeImage = m_ToFCompositeFilter->GetOutput(1); } else { this->m_RGBImageNode = ReplaceNodeData("RGB image",this->m_ToFImageGrabber->GetOutput(3)); } } else { this->m_RGBImageNode = NULL; } if(hasAmplitudeImage) { this->m_MitkAmplitudeImage = m_ToFCompositeFilter->GetOutput(1); this->m_AmplitudeImageNode = ReplaceNodeData("Amplitude image",m_MitkAmplitudeImage); } if(hasIntensityImage) { this->m_MitkIntensityImage = m_ToFCompositeFilter->GetOutput(2); this->m_IntensityImageNode = ReplaceNodeData("Intensity image",m_MitkIntensityImage); } this->m_ToFDistanceImageToSurfaceFilter->SetInput(0,m_MitkDistanceImage); this->m_ToFDistanceImageToSurfaceFilter->SetInput(1,m_MitkAmplitudeImage); this->m_ToFDistanceImageToSurfaceFilter->SetInput(2,m_MitkIntensityImage); bool hasSurface = false; 
m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("HasSurface", hasSurface); if(hasSurface) { this->m_Surface = mitk::Surface::New(); } else { this->m_Surface = this->m_ToFDistanceImageToSurfaceFilter->GetOutput(0); } this->m_SurfaceNode = ReplaceNodeData("Surface",m_Surface); this->UseToFVisibilitySettings(true); m_Controls->m_ToFCompositeFilterWidget->UpdateFilterParameter(); // initialize visualization widget m_Controls->m_ToFVisualisationSettingsWidget->Initialize(this->m_DistanceImageNode, this->m_AmplitudeImageNode, this->m_IntensityImageNode); // set distance image to measurement widget m_Controls->tofMeasurementWidget->SetDistanceImage(m_MitkDistanceImage); this->m_Frametimer->start(0); m_Controls->m_ToFVisualisationSettingsWidget->setEnabled(true); m_Controls->m_ToFCompositeFilterWidget->setEnabled(true); m_Controls->tofMeasurementWidget->setEnabled(true); m_Controls->SurfacePropertiesBox->setEnabled(true); if (m_Controls->m_TextureCheckBox->isChecked()) { OnTextureCheckBoxChecked(true); } if (m_Controls->m_KinectTextureCheckBox->isChecked()) { OnKinectRGBTextureCheckBoxChecked(true); } } m_Controls->m_TextureCheckBox->setEnabled(true); } void QmitkToFUtilView::OnToFCameraStopped() { m_Controls->m_ToFVisualisationSettingsWidget->setEnabled(false); m_Controls->m_ToFCompositeFilterWidget->setEnabled(false); m_Controls->SurfacePropertiesBox->setEnabled(false); this->m_Frametimer->stop(); } void QmitkToFUtilView::OnToFCameraSelected(const QString selected) { m_SelectedCamera = selected; if (selected.contains("O3D")) { MITK_INFO<<"Surface representation currently not available for CamBoard and O3. Intrinsic parameters missing."; this->m_Controls->m_SurfaceCheckBox->setEnabled(false); this->m_Controls->m_TextureCheckBox->setEnabled(false); this->m_Controls->m_KinectTextureCheckBox->setEnabled(false); this->m_Controls->m_SurfaceCheckBox->setChecked(false); this->m_Controls->m_TextureCheckBox->setChecked(false); this->m_Controls->m_KinectTextureCheckBox->setChecked(false); } else { this->m_Controls->m_SurfaceCheckBox->setEnabled(true); this->m_Controls->m_TextureCheckBox->setEnabled(true); this->m_Controls->m_KinectTextureCheckBox->setEnabled(true); } } void QmitkToFUtilView::OnTriangulationThresholdSpinBoxChanged() { this->m_ToFDistanceImageToSurfaceFilter->SetTriangulationThreshold( this->m_Controls->m_TriangulationThreshold->value() ); this->m_ToFImageGrabber->GetCameraDevice()->SetFloatProperty("TriangulationThreshold", this->m_Controls->m_TriangulationThreshold->value()); } void QmitkToFUtilView::OnTriangulationCheckBoxChanged() { this->m_ToFDistanceImageToSurfaceFilter->SetGenerateTriangularMesh(this->m_Controls->m_GenerateTriangularMeshCheckBox->isChecked()); this->m_ToFImageGrabber->GetCameraDevice()->SetBoolProperty("GenerateTriangularMesh", this->m_Controls->m_GenerateTriangularMeshCheckBox->isChecked()); } void QmitkToFUtilView::OnSurfaceCheckboxChecked(bool checked) { if(checked) { //initialize the surface once MITK_DEBUG << "OnSurfaceCheckboxChecked true"; this->m_SurfaceNode->SetData(this->m_Surface); this->m_ToFDistanceImageToSurfaceFilter->SetTriangulationThreshold( this->m_Controls->m_TriangulationThreshold->value() ); this->m_ToFImageGrabber->GetCameraDevice()->SetFloatProperty("TriangulationThreshold", this->m_Controls->m_TriangulationThreshold->value()); this->m_ToFDistanceImageToSurfaceFilter->SetGenerateTriangularMesh(this->m_Controls->m_GenerateTriangularMeshCheckBox->isChecked()); 
this->m_ToFImageGrabber->GetCameraDevice()->SetBoolProperty("GenerateTriangularMesh", this->m_Controls->m_GenerateTriangularMeshCheckBox->isChecked()); //we need to initialize (reinit) the surface, to make it fit into the renderwindow this->GetRenderWindowPart()->GetRenderingManager()->InitializeViews( this->m_Surface->GetTimeGeometry(), mitk::RenderingManager::REQUEST_UPDATE_3DWINDOWS, true); // correctly place the vtk camera for appropriate surface rendering vtkCamera* camera3d = GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderer()->GetVtkRenderer()->GetActiveCamera(); //1m distance to camera should be a nice default value for most cameras camera3d->SetPosition(0,0,0); - camera3d->SetViewUp(0,1,0); + camera3d->SetViewUp(0,-1,0); camera3d->SetFocalPoint(0,0,1); if (this->m_CameraIntrinsics.IsNotNull()) { // compute view angle from camera intrinsics camera3d->SetViewAngle(mitk::ToFProcessingCommon::CalculateViewAngle(m_CameraIntrinsics,m_ToFImageGrabber->GetCaptureWidth())); } else { camera3d->SetViewAngle(45); } GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderer()->GetVtkRenderer()->ResetCameraClippingRange(); } } void QmitkToFUtilView::OnUpdateCamera() { //##### Code for surface ##### if (m_Controls->m_SurfaceCheckBox->isChecked()) { // update surface m_ToFDistanceImageToSurfaceFilter->SetTextureIndex(m_Controls->m_ToFVisualisationSettingsWidget->GetSelectedImageIndex()); bool hasSurface = false; this->m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("HasSurface", hasSurface); if(hasSurface) { mitk::SmartPointerProperty::Pointer surfaceProp = dynamic_cast< mitk::SmartPointerProperty * >(this->m_ToFImageGrabber->GetCameraDevice()->GetProperty("ToFSurface")); this->m_Surface->SetVtkPolyData( dynamic_cast< mitk::Surface* >( surfaceProp->GetSmartPointer().GetPointer() )->GetVtkPolyData() ); } //update pipeline this->m_Surface->Update(); } //##### End code for surface ##### else { // update pipeline this->m_MitkDistanceImage->Update(); } this->RequestRenderWindowUpdate(); this->m_2DDisplayCount++; if ((this->m_2DDisplayCount % this->m_StepsForFramerate) == 0) { this->m_2DTimeAfter = this->m_RealTimeClock->GetCurrentStamp() - this->m_2DTimeBefore; MITK_INFO << " 2D-Display-framerate (fps): " << this->m_StepsForFramerate / (this->m_2DTimeAfter/1000); this->m_2DTimeBefore = this->m_RealTimeClock->GetCurrentStamp(); } } void QmitkToFUtilView::OnTextureCheckBoxChecked(bool checked) { if(m_SurfaceNode.IsNotNull()) { if (checked) { mitk::TransferFunction::Pointer transferFunction = mitk::TransferFunction::New(); transferFunction->SetColorTransferFunction(m_Controls->m_ToFVisualisationSettingsWidget->GetSelectedColorTransferFunction()); this->m_SurfaceNode->SetProperty("Surface.TransferFunction", mitk::TransferFunctionProperty::New(transferFunction)); this->m_SurfaceNode->SetBoolProperty("scalar visibility", true); } else { this->m_SurfaceNode->SetBoolProperty("scalar visibility", false); } } } void QmitkToFUtilView::OnKinectRGBTextureCheckBoxChecked(bool checked) { if((m_SelectedCamera.contains("Kinect")) && (m_ToFImageGrabber->GetBoolProperty("RGB"))) { if (checked) { // enable texture this->m_SurfaceNode->SetProperty("Surface.Texture",mitk::SmartPointerProperty::New(this->m_ToFImageGrabber->GetOutput(3))); } else { // disable texture this->m_SurfaceNode->GetPropertyList()->DeleteProperty("Surface.Texture"); } } } void QmitkToFUtilView::OnChangeCoronalWindowOutput(int index) { this->OnToFCameraStopped(); if(index == 0) { if(this->m_IntensityImageNode.IsNotNull()) 
this->m_IntensityImageNode->SetVisibility(false); if(this->m_RGBImageNode.IsNotNull()) this->m_RGBImageNode->SetVisibility(true); } else if(index == 1) { if(this->m_IntensityImageNode.IsNotNull()) this->m_IntensityImageNode->SetVisibility(true); if(this->m_RGBImageNode.IsNotNull()) this->m_RGBImageNode->SetVisibility(false); } this->RequestRenderWindowUpdate(); this->OnToFCameraStarted(); } mitk::DataNode::Pointer QmitkToFUtilView::ReplaceNodeData( std::string nodeName, mitk::BaseData* data ) { mitk::DataNode::Pointer node = this->GetDataStorage()->GetNamedNode(nodeName); if (node.IsNull()) { node = mitk::DataNode::New(); node->SetData(data); node->SetName(nodeName); node->SetBoolProperty("binary",false); this->GetDataStorage()->Add(node); } else { node->SetData(data); } return node; } void QmitkToFUtilView::UseToFVisibilitySettings(bool useToF) { //We need this property for every node. mitk::RenderingModeProperty::Pointer renderingModePropertyForTransferFunction = mitk::RenderingModeProperty::New(mitk::RenderingModeProperty::COLORTRANSFERFUNCTION_COLOR); // set node properties if (m_DistanceImageNode.IsNotNull()) { this->m_DistanceImageNode->SetProperty( "visible" , mitk::BoolProperty::New( true )); this->m_DistanceImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetRenderWindow() ) ); this->m_DistanceImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetRenderWindow() ) ); this->m_DistanceImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderWindow() ) ); this->m_DistanceImageNode->SetProperty("Image Rendering.Mode", renderingModePropertyForTransferFunction); } if (m_AmplitudeImageNode.IsNotNull()) { this->m_AmplitudeImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetRenderWindow() ) ); this->m_AmplitudeImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetRenderWindow() ) ); this->m_AmplitudeImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderWindow() ) ); this->m_AmplitudeImageNode->SetProperty("Image Rendering.Mode", renderingModePropertyForTransferFunction); } if (m_IntensityImageNode.IsNotNull()) { this->m_IntensityImageNode->SetProperty( "visible" , mitk::BoolProperty::New( true )); this->m_IntensityImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetRenderWindow() ) ); this->m_IntensityImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetRenderWindow() ) ); this->m_IntensityImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderWindow() ) ); this->m_IntensityImageNode->SetProperty("Image Rendering.Mode", renderingModePropertyForTransferFunction); } if ((m_RGBImageNode.IsNotNull())) { this->m_RGBImageNode->SetProperty( "visible" , mitk::BoolProperty::New( true )); this->m_RGBImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("axial")->GetRenderWindow() ) ); this->m_RGBImageNode->SetVisibility( !useToF, 
mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("sagittal")->GetRenderWindow() ) ); this->m_RGBImageNode->SetVisibility( !useToF, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderWindow() ) ); } // initialize images if (m_MitkDistanceImage.IsNotNull()) { this->GetRenderWindowPart()->GetRenderingManager()->InitializeViews( this->m_MitkDistanceImage->GetTimeGeometry(), mitk::RenderingManager::REQUEST_UPDATE_2DWINDOWS, true); } if(this->m_SurfaceNode.IsNotNull()) { QHash renderWindowHashMap = this->GetRenderWindowPart()->GetQmitkRenderWindows(); QHashIterator i(renderWindowHashMap); while (i.hasNext()){ i.next(); this->m_SurfaceNode->SetVisibility( false, mitk::BaseRenderer::GetInstance(i.value()->GetRenderWindow()) ); } this->m_SurfaceNode->SetVisibility( true, mitk::BaseRenderer::GetInstance(GetRenderWindowPart()->GetQmitkRenderWindow("3d")->GetRenderWindow() ) ); } //disable/enable gradient background this->GetRenderWindowPart()->EnableDecorations(!useToF, QStringList(QString("background"))); if((this->m_RGBImageNode.IsNotNull())) { bool RGBImageHasDifferentResolution = false; m_ToFImageGrabber->GetCameraDevice()->GetBoolProperty("RGBImageHasDifferentResolution",RGBImageHasDifferentResolution); if(RGBImageHasDifferentResolution) { //update the display geometry by using the RBG image node. Only for renderwindow coronal mitk::RenderingManager::GetInstance()->InitializeView( GetRenderWindowPart()->GetQmitkRenderWindow("coronal")->GetRenderWindow(), this->m_RGBImageNode->GetData()->GetGeometry() ); } } }
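
Reviewer note (illustration only, not part of the patch): the two hunks above work together. In KinectV2Controller::GetAllData() the camera-space X and Y coordinates are negated when the vertices are inserted, and the color buffer is written horizontally mirrored so the texture still lines up with the mirrored geometry; in QmitkToFUtilView::OnSurfaceCheckboxChecked() the 3-D camera's view-up vector is flipped to (0,-1,0) to match. The sketch below is a minimal, self-contained version of the mirrored copy loop only; the struct and function names (Bgra, MirrorBgraToRgb) are made up for this example and do not exist in MITK, but the index computation is the same as in the added loop: id = ((width - i - 1) + j*width)*3.

#include <cstddef>
#include <vector>

// Byte layout analogous to the SDK's RGBQUAD (blue, green, red, reserved/alpha).
struct Bgra { unsigned char b, g, r, a; };

// Copy a BGRA image into a tightly packed RGB buffer while mirroring it
// left-right: source column i ends up in target column (width - i - 1).
void MirrorBgraToRgb(const std::vector<Bgra>& src, std::vector<unsigned char>& dst,
                     int width, int height)
{
  dst.resize(static_cast<std::size_t>(width) * height * 3);
  for (int j = 0; j < height; ++j)
  {
    for (int i = 0; i < width; ++i)
    {
      const Bgra& px = src[static_cast<std::size_t>(j) * width + i];
      // 3 bytes per target pixel, mirrored column index
      const std::size_t id =
        (static_cast<std::size_t>(width - i - 1) + static_cast<std::size_t>(j) * width) * 3;
      dst[id + 0] = px.r; // convert BGR(A) to RGB while copying
      dst[id + 1] = px.g;
      dst[id + 2] = px.b;
    }
  }
}

Mirroring at copy time keeps the per-pixel work to a single pass over the 1920x1080 buffer and leaves the depth-to-color mapping untouched; only the index into the target buffer changes.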