diff --git a/Modules/OpenCL/mitkOclDataSet.cpp b/Modules/OpenCL/mitkOclDataSet.cpp index eaebdf5d42..b35851255c 100644 --- a/Modules/OpenCL/mitkOclDataSet.cpp +++ b/Modules/OpenCL/mitkOclDataSet.cpp @@ -1,160 +1,170 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #include "mitkOclDataSet.h" #include "mitkCommon.h" #include "mitkLogMacros.h" #include "mitkOclUtils.h" #include mitk::OclDataSet::OclDataSet() : m_gpuBuffer(nullptr), m_context(nullptr), m_bufferSize(0), m_gpuModified(false), m_cpuModified(false), m_Data(nullptr), m_BpE(1) { } mitk::OclDataSet::~OclDataSet() { MITK_INFO << "OclDataSet Destructor"; //release GMEM Image buffer if (m_gpuBuffer) clReleaseMemObject(m_gpuBuffer); } cl_mem mitk::OclDataSet::CreateGPUBuffer() { MITK_INFO << "InitializeGPUBuffer call with: BPE=" << m_BpE; us::ServiceReference ref = GetModuleContext()->GetServiceReference(); OclResourceService* resources = GetModuleContext()->GetService(ref); m_context = resources->GetContext(); int clErr; if (m_gpuBuffer) clReleaseMemObject(m_gpuBuffer); - m_gpuBuffer = clCreateBuffer(m_context, CL_MEM_READ_WRITE, m_bufferSize * m_BpE, nullptr, &clErr); + m_gpuBuffer = clCreateBuffer(m_context, CL_MEM_READ_WRITE, m_bufferSize * (size_t)m_BpE, nullptr, &clErr); - MITK_INFO << "Created GPU Buffer Object of size: " << m_BpE* m_bufferSize << " Bytes"; + MITK_INFO << "Created GPU Buffer Object of size: " << (size_t)m_BpE * m_bufferSize << " Bytes"; CHECK_OCL_ERR(clErr); + if (clErr != CL_SUCCESS) + mitkThrow() << "openCL Error when creating Buffer"; + return m_gpuBuffer; } bool mitk::OclDataSet::IsModified(int _type) { if (_type) return m_cpuModified; else return m_gpuModified; } void mitk::OclDataSet::Modified(int _type) { // defines... 
GPU: 0, CPU: 1 m_cpuModified = _type; m_gpuModified = !_type; } int mitk::OclDataSet::TransferDataToGPU(cl_command_queue gpuComQueue) { cl_int clErr = 0; // check whether an image present if (m_Data == nullptr){ MITK_ERROR("ocl.DataSet") << "(mitk) No data present!\n"; return -1; } // there is a need for copy only if RAM-Data newer then GMEM data if (m_cpuModified) { //check the buffer if(m_gpuBuffer == nullptr) { CreateGPUBuffer(); } if (m_gpuBuffer != nullptr) { - clErr = clEnqueueWriteBuffer(gpuComQueue, m_gpuBuffer, CL_TRUE, 0, m_bufferSize * m_BpE, m_Data, 0, NULL, NULL); + clErr = clEnqueueWriteBuffer(gpuComQueue, m_gpuBuffer, CL_TRUE, 0, m_bufferSize * (size_t)m_BpE, m_Data, 0, NULL, NULL); MITK_INFO << "Wrote Data to GPU Buffer Object."; + + CHECK_OCL_ERR(clErr); + m_gpuModified = true; + + if (clErr != CL_SUCCESS) + mitkThrow() << "openCL Error when writing Buffer"; } else { MITK_ERROR << "No GPU buffer present!"; } - CHECK_OCL_ERR(clErr); - m_gpuModified = true; } return clErr; } cl_mem mitk::OclDataSet::GetGPUBuffer() { // clGetMemObjectInfo() cl_mem_object_type memInfo; cl_int clErr = 0; // query image object info only if already initialized if( this->m_gpuBuffer ) { clErr = clGetMemObjectInfo(this->m_gpuBuffer, CL_MEM_TYPE, sizeof(cl_mem_object_type), &memInfo, nullptr ); CHECK_OCL_ERR(clErr); } MITK_INFO << "Querying info for object, recieving: " << memInfo; return m_gpuBuffer; } void* mitk::OclDataSet::TransferDataToCPU(cl_command_queue gpuComQueue) { cl_int clErr = 0; // if image created on GPU, needs to create mitk::Image if( m_gpuBuffer == nullptr ){ MITK_ERROR("ocl.DataSet") << "(mitk) No buffer present!\n"; return nullptr; } // check buffersize - char* data = new char[m_bufferSize * m_BpE]; + char* data = new char[m_bufferSize * (size_t)m_BpE]; // debug info oclPrintMemObjectInfo( m_gpuBuffer ); - clErr = clEnqueueReadBuffer( gpuComQueue, m_gpuBuffer, CL_TRUE, 0, m_bufferSize * m_BpE, data ,0, nullptr, nullptr); + clErr = clEnqueueReadBuffer( gpuComQueue, m_gpuBuffer, CL_TRUE, 0, m_bufferSize * (size_t)m_BpE, data ,0, nullptr, nullptr); CHECK_OCL_ERR(clErr); + if(clErr != CL_SUCCESS) + mitkThrow() << "openCL Error when reading Output Buffer"; + clFlush( gpuComQueue ); // the cpu data is same as gpu this->m_gpuModified = false; return (void*) data; } -void mitk::OclDataSet::SetBufferSize(unsigned int size) +void mitk::OclDataSet::SetBufferSize(size_t size) { m_bufferSize = size; } void mitk::OclDataSet::SetBpE(unsigned short BpE) { m_BpE = BpE; } \ No newline at end of file diff --git a/Modules/OpenCL/mitkOclDataSet.h b/Modules/OpenCL/mitkOclDataSet.h index 1bc5dcadde..773510e47c 100644 --- a/Modules/OpenCL/mitkOclDataSet.h +++ b/Modules/OpenCL/mitkOclDataSet.h @@ -1,134 +1,134 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #ifndef __mitkOclDataSet_h #define __mitkOclDataSet_h #define GPU_DATA 0 #define CPU_DATA 1 #include #include "MitkOpenCLExports.h" #include "mitkOclBaseData.h" #include "mitkOpenCLActivator.h" #include namespace mitk { /*! 
* \brief Class implementing processing of arbitrary data sets for GPU Image Processing * * The class holds a pointer to the data stored in RAM and performs an * on-demand-copy to the graphics memory. It is the basic data structure for all * mitk::oclDataSetToDataSetFilter classes */ class MITKOPENCL_EXPORT OclDataSet : public OclBaseData { public: mitkClassMacro(OclDataSet, OclBaseData); itkFactorylessNewMacro(Self) itkCloneMacro(Self) /*! \brief Copies the RAM-stored data to GMEM */ virtual int TransferDataToGPU(cl_command_queue); /*! \brief Copies the in GMEM stored data to RAM */ virtual void* TransferDataToCPU(cl_command_queue); /*! \brief Returns the pointer to the referenced data */ void* GetData() { return m_Data; } /** Returns the pointer to the GPU buffer */ cl_mem GetGPUBuffer(); /** Create the GPU buffer for image * */ cl_mem CreateGPUBuffer(); /** \brief Returns the status of the image buffer * * @param _type The flag to specify the buffer type ( GPU / CPU ) */ bool IsModified(int _type); using OclBaseData::Modified; /** \brief Set the modified flag for one of the buffers * * @param _type The flag to specify the buffer type ( GPU / CPU ) */ void Modified(int _type); /** \brief Initialze the OclDataSet with data. */ void SetData(void* data) { this->m_cpuModified = true; this->m_gpuModified = false; m_Data = data; } /*! \brief returns the amount of elements in the DataSet */ - int GetBufferSize() const + size_t GetBufferSize() const { return this->m_bufferSize; } short GetBytesPerElement() const { return this->m_BpE; } /** @brief Set the amount of elements in buffer*/ - void SetBufferSize(unsigned int size); + void SetBufferSize(size_t size); /** @brief Set the DataSet memory Size per Element in Bytes*/ void SetBpE(unsigned short BpE); protected: /*! \brief Constructor */ OclDataSet(); /** @brief Destructor */ virtual ~OclDataSet(); /*! GMEM Image buffer */ cl_mem m_gpuBuffer; /*! GPU Context the Buffer was created in, needed for access */ cl_context m_context; private: /*! GMEM Buffer Size in elements*/ - unsigned int m_bufferSize; + size_t m_bufferSize; bool m_gpuModified; bool m_cpuModified; /*! Reference to the data */ void* m_Data; /*! Bytes per Element in Buffer*/ unsigned short m_BpE; }; } #endif //__mitkOclDataSet_h diff --git a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingFilter.h b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingFilter.h index f4956b6859..1b28a9f648 100644 --- a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingFilter.h +++ b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingFilter.h @@ -1,80 +1,96 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ #ifndef MITK_PHOTOACOUSTICS_BEAMFORMING_FILTER #define MITK_PHOTOACOUSTICS_BEAMFORMING_FILTER #include "mitkImageToImageFilter.h" #include #include "./OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.h" #include "mitkPhotoacousticBeamformingSettings.h" namespace mitk { //##Documentation //## @brief //## @ingroup Process class BeamformingFilter : public ImageToImageFilter { public: mitkClassMacro(BeamformingFilter, ImageToImageFilter); itkFactorylessNewMacro(Self) itkCloneMacro(Self) - void Configure(BeamformingSettings settings); + void Configure(BeamformingSettings settings) + { + m_ConfOld = m_Conf; + m_Conf = settings; + } + + std::string GetMessageString() + { + return m_Message; + } void SetProgressHandle(std::function progressHandle); protected: BeamformingFilter(); ~BeamformingFilter(); virtual void GenerateInputRequestedRegion() override; virtual void GenerateOutputInformation() override; virtual void GenerateData() override; //##Description //## @brief Time when Header was last initialized itk::TimeStamp m_TimeOfHeaderInitialization; std::function m_ProgressHandle; float* VonHannFunction(int samples); float* HammFunction(int samples); float* BoxFunction(int samples); void DASQuadraticLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize); void DASSphericalLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize); void DMASQuadraticLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize); void DMASSphericalLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize); float* m_OutputData; float* m_InputData; float* m_InputDataPuffer; + float* m_VonHannFunction; + float* m_HammFunction; + float* m_BoxFunction; + BeamformingSettings m_Conf; + BeamformingSettings m_ConfOld; mitk::PhotoacousticOCLBeamformingFilter::Pointer m_BeamformingOclFilter; + + std::string m_Message; }; } // namespace mitk #endif //MITK_PHOTOACOUSTICS_BEAMFORMING_FILTER diff --git a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingSettings.h b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingSettings.h index 6e207b6e7d..a54bd07bb5 100644 --- a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingSettings.h +++ b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticBeamformingSettings.h @@ -1,83 +1,84 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ #ifndef MITK_PHOTOACOUSTICS_BEAMFORMING_SETTINGS #define MITK_PHOTOACOUSTICS_BEAMFORMING_SETTINGS namespace mitk { //##Documentation //## @brief //## @ingroup Process class BeamformingSettings { public: float Pitch = 0.0003; // [m] float SpeedOfSound = 1540; // [m/s] unsigned int SamplesPerLine = 2048; unsigned int ReconstructionLines = 128; float RecordTime = 0.00006; // [s] float TimeSpacing = 0.0000000000001; // [s] unsigned short TransducerElements = 128; bool partial = false; unsigned int CropBounds[2] = { 0,0 }; unsigned int inputDim[3] = { 1,1,1 }; unsigned int upperCutoff = 0; bool UseGPU = true; enum DelayCalc { QuadApprox, Spherical }; DelayCalc DelayCalculationMethod = QuadApprox; enum Apodization { Hamm, Hann, Box }; Apodization Apod = Hann; enum BeamformingAlgorithm { DMAS, DAS }; BeamformingAlgorithm Algorithm = DAS; float Angle = 10; bool isPhotoacousticImage = true; float BPHighPass = 50; float BPLowPass = 50; bool UseBP = false; + int apodizationArraySize = 128; //this method ignores changes in BPLow/BPHigh/cropBounds/Algorithm/some more, as those are insignifiant in all current situations static bool SettingsChangedOpenCL(const BeamformingSettings& lhs, const BeamformingSettings& rhs) { return !(((lhs.Angle - rhs.Angle) < 0.001f) && (lhs.Apod == rhs.Apod) && (lhs.DelayCalculationMethod == rhs.DelayCalculationMethod) && (lhs.isPhotoacousticImage == rhs.isPhotoacousticImage) && ((lhs.Pitch - rhs.Pitch) < 0.0000001f) && (lhs.ReconstructionLines == rhs.ReconstructionLines) && (lhs.RecordTime - rhs.RecordTime) < 0.00000001f && (lhs.SamplesPerLine == rhs.SamplesPerLine) && ((lhs.SpeedOfSound - rhs.SpeedOfSound) < 0.01f) && ((lhs.TimeSpacing - rhs.TimeSpacing) < 0.0000000001f) && (lhs.TransducerElements == rhs.TransducerElements)); } static bool OutputDimensionsChanged(const BeamformingSettings& lhs, const BeamformingSettings& rhs) { return !((lhs.ReconstructionLines == rhs.ReconstructionLines) && (lhs.SamplesPerLine == rhs.SamplesPerLine) && (lhs.inputDim[2] == rhs.inputDim[2])); } }; } #endif \ No newline at end of file diff --git a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticImage.h b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticImage.h index 9f00543a22..5e579a2f29 100644 --- a/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticImage.h +++ b/Modules/PhotoacousticsAlgorithms/include/mitkPhotoacousticImage.h @@ -1,53 +1,53 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ #ifndef mitkPhotoacousticImage_H_HEADER_INCLUDED #define mitkPhotoacousticImage_H_HEADER_INCLUDED #include "itkObject.h" #include "mitkCommon.h" #include "mitkImage.h" #include #include "mitkPhotoacousticBeamformingSettings.h" #include "mitkPhotoacousticBeamformingFilter.h" #include "MitkPhotoacousticsAlgorithmsExports.h" namespace mitk { class MITKPHOTOACOUSTICSALGORITHMS_EXPORT PhotoacousticImage : public itk::Object { public: mitkClassMacroItkParent(mitk::PhotoacousticImage, itk::Object); itkFactorylessNewMacro(Self); enum BModeMethod { ShapeDetection, Abs }; mitk::Image::Pointer ApplyBmodeFilter(mitk::Image::Pointer inputImage, BModeMethod method = BModeMethod::Abs, bool UseGPU = false, bool UseLogFilter = false, float resampleSpacing = 0.15); // mitk::Image::Pointer ApplyScatteringCompensation(mitk::Image::Pointer inputImage, int scatteringCoefficient); mitk::Image::Pointer ApplyResampling(mitk::Image::Pointer inputImage, unsigned int outputSize[2]); - mitk::Image::Pointer ApplyBeamforming(mitk::Image::Pointer inputImage, BeamformingSettings config, std::function progressHandle = [](int, std::string) {}); + mitk::Image::Pointer ApplyBeamforming(mitk::Image::Pointer inputImage, BeamformingSettings config, std::string& message, std::function progressHandle = [](int, std::string) {}); mitk::Image::Pointer ApplyCropping(mitk::Image::Pointer inputImage, int above, int below, int right, int left, int minSlice, int maxSlice); mitk::Image::Pointer BandpassFilter(mitk::Image::Pointer data, float recordTime, float BPHighPass, float BPLowPass, float alpha); protected: PhotoacousticImage(); virtual ~PhotoacousticImage(); mitk::BeamformingFilter::Pointer m_BeamformingFilter; itk::Image::Pointer BPFunction(mitk::Image::Pointer reference, int cutoffFrequencyPixelHighPass, int cutoffFrequencyPixelLowPass, float alpha); }; } // namespace mitk #endif /* mitkPhotoacousticImage_H_HEADER_INCLUDED */ diff --git a/Modules/PhotoacousticsAlgorithms/source/OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.cpp b/Modules/PhotoacousticsAlgorithms/source/OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.cpp index 72187362aa..81d1d23492 100644 --- a/Modules/PhotoacousticsAlgorithms/source/OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.cpp +++ b/Modules/PhotoacousticsAlgorithms/source/OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.cpp @@ -1,249 +1,255 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ #ifdef PHOTOACOUSTICS_USE_GPU #include "./OpenCLFilter/mitkPhotoacousticOCLBeamformingFilter.h" #include "usServiceReference.h" mitk::PhotoacousticOCLBeamformingFilter::PhotoacousticOCLBeamformingFilter() : m_PixelCalculation( NULL ), m_InputImage(mitk::Image::New()), m_ApodizationBuffer(nullptr), m_MemoryLocationsBuffer(nullptr), m_DelaysBuffer(nullptr), m_UsedLinesBuffer(nullptr) { this->AddSourceFile("DAS.cl"); this->AddSourceFile("DMAS.cl"); this->m_FilterID = "OpenCLBeamformingFilter"; this->Initialize(); unsigned int dim[] = { 128, 2048, 2 }; mitk::Vector3D spacing; spacing[0] = 1; spacing[1] = 1; spacing[2] = 1; m_InputImage->Initialize(mitk::MakeScalarPixelType(), 3, dim); m_InputImage->SetSpacing(spacing); m_ChunkSize[0] = 128; m_ChunkSize[1] = 128; m_ChunkSize[2] = 8; m_UsedLinesCalculation = mitk::OCLUsedLinesCalculation::New(); m_DelayCalculation = mitk::OCLDelayCalculation::New(); } mitk::PhotoacousticOCLBeamformingFilter::~PhotoacousticOCLBeamformingFilter() { if ( this->m_PixelCalculation ) { clReleaseKernel( m_PixelCalculation ); } if (m_ApodizationBuffer) clReleaseMemObject(m_ApodizationBuffer); } void mitk::PhotoacousticOCLBeamformingFilter::Update() { //Check if context & program available if (!this->Initialize()) { us::ServiceReference ref = GetModuleContext()->GetServiceReference(); OclResourceService* resources = GetModuleContext()->GetService(ref); // clean-up also the resources resources->InvalidateStorage(); mitkThrow() <<"Filter is not initialized. Cannot update."; } else{ // Execute this->Execute(); } } void mitk::PhotoacousticOCLBeamformingFilter::UpdateDataBuffers() { //Initialize the Output try { MITK_INFO << "Updating Workgroup size for new dimensions"; - size_t outputSize = m_Conf.ReconstructionLines * m_Conf.SamplesPerLine * m_Conf.inputDim[2]; + size_t outputSize = (size_t)m_Conf.ReconstructionLines * (size_t)m_Conf.SamplesPerLine * (size_t)m_Conf.inputDim[2]; m_OutputDim[0] = m_Conf.ReconstructionLines; m_OutputDim[1] = m_Conf.SamplesPerLine; m_OutputDim[2] = m_Conf.inputDim[2]; this->InitExec(this->m_PixelCalculation, m_OutputDim, outputSize, sizeof(float)); } catch (const mitk::Exception& e) { MITK_ERROR << "Caught exception while initializing filter: " << e.what(); return; } if (BeamformingSettings::SettingsChangedOpenCL(m_Conf, m_ConfOld)) { cl_int clErr = 0; MITK_INFO << "Updating Buffers for new configuration"; // create the apodisation buffer if (m_Apodisation == nullptr) { MITK_INFO << "No apodisation function set; Beamforming will be done without any apodisation."; m_Apodisation = new float[1]; m_Apodisation[0] = 1; m_ApodArraySize = 1; } us::ServiceReference ref = GetModuleContext()->GetServiceReference(); OclResourceService* resources = GetModuleContext()->GetService(ref); cl_context gpuContext = resources->GetContext(); if (m_ApodizationBuffer) clReleaseMemObject(m_ApodizationBuffer); this->m_ApodizationBuffer = clCreateBuffer(gpuContext, CL_MEM_READ_ONLY | CL_MEM_USE_HOST_PTR, sizeof(float) * m_ApodArraySize, m_Apodisation, &clErr); CHECK_OCL_ERR(clErr); // calculate used lines m_UsedLinesCalculation->SetConfig(m_Conf); m_UsedLinesCalculation->Update(); m_UsedLinesBuffer = m_UsedLinesCalculation->GetGPUOutput()->GetGPUBuffer(); // calculate the Delays m_DelayCalculation->SetConfig(m_Conf); m_DelayCalculation->SetInputs(m_UsedLinesBuffer); m_DelayCalculation->Update(); m_DelaysBuffer = m_DelayCalculation->GetGPUOutput()->GetGPUBuffer(); m_ConfOld = m_Conf; } } void 
mitk::PhotoacousticOCLBeamformingFilter::Execute() { cl_int clErr = 0; UpdateDataBuffers(); clErr = clSetKernelArg(this->m_PixelCalculation, 2, sizeof(cl_mem), &(this->m_UsedLinesBuffer)); clErr |= clSetKernelArg(this->m_PixelCalculation, 3, sizeof(cl_mem), &(this->m_DelaysBuffer)); clErr |= clSetKernelArg(this->m_PixelCalculation, 4, sizeof(cl_mem), &(this->m_ApodizationBuffer)); clErr |= clSetKernelArg(this->m_PixelCalculation, 5, sizeof(cl_ushort), &(this->m_ApodArraySize)); clErr |= clSetKernelArg(this->m_PixelCalculation, 6, sizeof(cl_uint), &(this->m_Conf.inputDim[0])); clErr |= clSetKernelArg(this->m_PixelCalculation, 7, sizeof(cl_uint), &(this->m_Conf.inputDim[1])); clErr |= clSetKernelArg(this->m_PixelCalculation, 8, sizeof(cl_uint), &(this->m_Conf.inputDim[2])); clErr |= clSetKernelArg(this->m_PixelCalculation, 9, sizeof(cl_uint), &(this->m_Conf.ReconstructionLines)); clErr |= clSetKernelArg(this->m_PixelCalculation, 10, sizeof(cl_uint), &(this->m_Conf.SamplesPerLine)); // execute the filter on a 3D NDRange if (m_OutputDim[2] == 1 || m_ChunkSize[2] == 1) - this->ExecuteKernelChunks(m_PixelCalculation, 2, m_ChunkSize); + { + if(!this->ExecuteKernelChunks(m_PixelCalculation, 2, m_ChunkSize)) + mitkThrow() << "openCL Error when executing Kernel"; + } else - this->ExecuteKernelChunks(m_PixelCalculation, 3, m_ChunkSize); + { + if(!this->ExecuteKernelChunks(m_PixelCalculation, 3, m_ChunkSize)) + mitkThrow() << "openCL Error when executing Kernel"; + } // signalize the GPU-side data changed m_Output->Modified( GPU_DATA ); } us::Module *mitk::PhotoacousticOCLBeamformingFilter::GetModule() { return us::GetModuleContext()->GetModule(); } bool mitk::PhotoacousticOCLBeamformingFilter::Initialize() { bool buildErr = true; cl_int clErr = 0; if ( OclFilter::Initialize() ) { switch (m_Conf.Algorithm) { case BeamformingSettings::BeamformingAlgorithm::DAS: { this->m_PixelCalculation = clCreateKernel(this->m_ClProgram, "ckDAS", &clErr); break; } case BeamformingSettings::BeamformingAlgorithm::DMAS: { this->m_PixelCalculation = clCreateKernel(this->m_ClProgram, "ckDMAS", &clErr); break; } default: { MITK_INFO << "No beamforming algorithm specified, setting to DAS"; this->m_PixelCalculation = clCreateKernel(this->m_ClProgram, "ckDAS", &clErr); break; } } buildErr |= CHECK_OCL_ERR( clErr ); } CHECK_OCL_ERR(clErr); return (OclFilter::IsInitialized() && buildErr ); } void mitk::PhotoacousticOCLBeamformingFilter::SetInput(mitk::Image::Pointer image) { OclDataSetToDataSetFilter::SetInput(image); m_InputImage = image; m_Conf.inputDim[0] = m_InputImage->GetDimension(0); m_Conf.inputDim[1] = m_InputImage->GetDimension(1); m_Conf.inputDim[2] = m_InputImage->GetDimension(2); } void mitk::PhotoacousticOCLBeamformingFilter::SetInput(void* data, unsigned int* dimensions, unsigned int BpE) { OclDataSetToDataSetFilter::SetInput(data, dimensions[0] * dimensions[1] * dimensions[2], BpE); m_Conf.inputDim[0] = dimensions[0]; m_Conf.inputDim[1] = dimensions[1]; m_Conf.inputDim[2] = dimensions[2]; } mitk::Image::Pointer mitk::PhotoacousticOCLBeamformingFilter::GetOutputAsImage() { mitk::Image::Pointer outputImage = mitk::Image::New(); if (m_Output->IsModified(GPU_DATA)) { void* pData = m_Output->TransferDataToCPU(m_CommandQue); const unsigned int dimension = 3; unsigned int dimensions[3] = { (unsigned int)m_OutputDim[0], (unsigned int)m_OutputDim[1], (unsigned int)m_OutputDim[2] }; const mitk::SlicedGeometry3D::Pointer p_slg = m_InputImage->GetSlicedGeometry(); MITK_DEBUG << "Creating new MITK Image."; 
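// ---- [Editor's sketch, not part of the patch] --------------------------------
// The (size_t) casts added in mitkOclDataSet.cpp (element count times bytes per
// element) and in UpdateDataBuffers() above (the three output dimensions) promote
// the products to 64-bit before the multiplication, so large volumes no longer
// wrap around in 32-bit unsigned arithmetic on a 64-bit build. Minimal,
// self-contained illustration of the failure mode (illustrative values, not MITK code):
#include <cstddef>
#include <cstdio>

int main()
{
  unsigned int   elements        = 3000000000u; // ~3e9 samples
  unsigned short bytesPerElement = 4;

  unsigned int wrapped = elements * bytesPerElement;                 // wraps modulo 2^32
  std::size_t  correct = (std::size_t)elements * (std::size_t)bytesPerElement;

  std::printf("wrapped byte count: %u\ncorrect byte count: %zu\n", wrapped, correct);
  return 0;
}
// -------------------------------------------------------------------------------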
outputImage->Initialize(this->GetOutputType(), dimension, dimensions); outputImage->SetSpacing(p_slg->GetSpacing()); outputImage->SetImportVolume(pData, 0, 0, mitk::Image::ReferenceMemory); } MITK_DEBUG << "Image Initialized."; return outputImage; } void* mitk::PhotoacousticOCLBeamformingFilter::GetOutput() { return OclDataSetToDataSetFilter::GetOutput(); } #endif diff --git a/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticBeamformingFilter.cpp b/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticBeamformingFilter.cpp index 9edb570cc2..af04daac25 100644 --- a/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticBeamformingFilter.cpp +++ b/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticBeamformingFilter.cpp @@ -1,541 +1,558 @@ /*=================================================================== mitkPhotoacousticBeamformingFilter The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #define _USE_MATH_DEFINES #include "mitkProperties.h" #include "mitkImageReadAccessor.h" #include #include #include #include #include #include #include "mitkImageCast.h" #include "mitkPhotoacousticBeamformingFilter.h" -mitk::BeamformingFilter::BeamformingFilter() : m_OutputData(nullptr), m_InputData(nullptr) +mitk::BeamformingFilter::BeamformingFilter() : m_OutputData(nullptr), m_InputData(nullptr), m_Message("noMessage") { this->SetNumberOfIndexedInputs(1); this->SetNumberOfRequiredInputs(1); m_ProgressHandle = [](int, std::string) {}; m_BeamformingOclFilter = mitk::PhotoacousticOCLBeamformingFilter::New(); + + m_VonHannFunction = VonHannFunction(m_Conf.apodizationArraySize); + m_HammFunction = HammFunction(m_Conf.apodizationArraySize); + m_BoxFunction = BoxFunction(m_Conf.apodizationArraySize); } void mitk::BeamformingFilter::SetProgressHandle(std::function progressHandle) { m_ProgressHandle = progressHandle; } mitk::BeamformingFilter::~BeamformingFilter() { + delete[] m_VonHannFunction; + delete[] m_HammFunction; + delete[] m_BoxFunction; } void mitk::BeamformingFilter::GenerateInputRequestedRegion() { Superclass::GenerateInputRequestedRegion(); mitk::Image* output = this->GetOutput(); mitk::Image* input = const_cast (this->GetInput()); if (!output->IsInitialized()) { return; } input->SetRequestedRegionToLargestPossibleRegion(); //GenerateTimeInInputRegion(output, input); } void mitk::BeamformingFilter::GenerateOutputInformation() { mitk::Image::ConstPointer input = this->GetInput(); mitk::Image::Pointer output = this->GetOutput(); if ((output->IsInitialized()) && (this->GetMTime() <= m_TimeOfHeaderInitialization.GetMTime())) return; itkDebugMacro(<< "GenerateOutputInformation()"); unsigned int dim[] = { m_Conf.ReconstructionLines, m_Conf.SamplesPerLine, input->GetDimension(2) }; output->Initialize(mitk::MakeScalarPixelType(), 3, dim); mitk::Vector3D spacing; spacing[0] = m_Conf.Pitch * m_Conf.TransducerElements * 1000 / m_Conf.ReconstructionLines; spacing[1] = m_Conf.RecordTime / 2 * m_Conf.SpeedOfSound * 1000 / m_Conf.SamplesPerLine; spacing[2] = 1; output->GetGeometry()->SetSpacing(spacing); output->GetGeometry()->Modified(); output->SetPropertyList(input->GetPropertyList()->Clone()); 
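// ---- [Editor's sketch, not part of the patch] --------------------------------
// The patch consistently follows CHECK_OCL_ERR() with an explicit mitkThrow() so
// that OpenCL failures surface as exceptions instead of only being logged. In
// Execute() above, the return codes of the clSetKernelArg() calls are OR-ed into
// a single clErr; note that OR-ing mangles the individual codes, so a combined
// check (as sketched below) only preserves the fact that *something* failed.
// Hedged sketch of the pattern with standard exceptions (names illustrative,
// not MITK API):
#include <sstream>
#include <stdexcept>
#include <CL/cl.h>

inline void SetArgsOrThrow(cl_kernel kernel, cl_mem usedLines, cl_mem delays, cl_ushort apodSize)
{
  cl_int err = clSetKernelArg(kernel, 0, sizeof(cl_mem), &usedLines);
  err |= clSetKernelArg(kernel, 1, sizeof(cl_mem), &delays);
  err |= clSetKernelArg(kernel, 2, sizeof(cl_ushort), &apodSize);
  if (err != CL_SUCCESS)
  {
    std::ostringstream msg;
    msg << "OpenCL error while setting kernel arguments (combined code " << err << ")";
    throw std::runtime_error(msg.str());
  }
}
// -------------------------------------------------------------------------------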
m_TimeOfHeaderInitialization.Modified(); } void mitk::BeamformingFilter::GenerateData() { GenerateOutputInformation(); mitk::Image::Pointer input = this->GetInput(); mitk::Image::Pointer output = this->GetOutput(); if (!output->IsInitialized()) return; - const int apodArraySize = m_Conf.TransducerElements; // set the resolution of the apodization array float* ApodWindow; + if (m_ConfOld.apodizationArraySize != m_Conf.apodizationArraySize) + { + delete[] m_VonHannFunction; + delete[] m_HammFunction; + delete[] m_BoxFunction; + m_VonHannFunction = VonHannFunction(m_Conf.apodizationArraySize); + m_HammFunction = HammFunction(m_Conf.apodizationArraySize); + m_BoxFunction = BoxFunction(m_Conf.apodizationArraySize); + + m_ConfOld = m_Conf; + } - // calculate the appropiate apodization window + // set the appropiate apodization window switch (m_Conf.Apod) { - case BeamformingSettings::Apodization::Hann: - ApodWindow = VonHannFunction(apodArraySize); - break; - case BeamformingSettings::Apodization::Hamm: - ApodWindow = HammFunction(apodArraySize); - break; - case BeamformingSettings::Apodization::Box: - ApodWindow = BoxFunction(apodArraySize); - break; - default: - ApodWindow = BoxFunction(apodArraySize); - break; + case BeamformingSettings::Apodization::Hann: + ApodWindow = m_VonHannFunction; + break; + case BeamformingSettings::Apodization::Hamm: + ApodWindow = m_HammFunction; + break; + case BeamformingSettings::Apodization::Box: + ApodWindow = m_BoxFunction; + break; + default: + ApodWindow = m_BoxFunction; + break; } auto begin = std::chrono::high_resolution_clock::now(); // debbuging the performance... if (!m_Conf.UseGPU) { int progInterval = output->GetDimension(2) / 20 > 1 ? output->GetDimension(2) / 20 : 1; // the interval at which we update the gui progress bar float inputDim[2] = { (float)input->GetDimension(0), (float)input->GetDimension(1) }; float outputDim[2] = { (float)output->GetDimension(0), (float)output->GetDimension(1) }; for (unsigned int i = 0; i < output->GetDimension(2); ++i) // seperate Slices should get Beamforming seperately applied { mitk::ImageReadAccessor inputReadAccessor(input, input->GetSliceData(i)); // first, we check whether the dara is float, other formats are unsupported if (input->GetPixelType().GetTypeAsString() == "scalar (float)" || input->GetPixelType().GetTypeAsString() == " (float)") { m_InputData = (float*)inputReadAccessor.GetData(); } else { MITK_INFO << "Pixel type is not float, abort"; return; } m_OutputData = new float[m_Conf.ReconstructionLines*m_Conf.SamplesPerLine]; // fill the image with zeros for (int l = 0; l < outputDim[0]; ++l) { for (int s = 0; s < outputDim[1]; ++s) { m_OutputData[l*(short)outputDim[1] + s] = 0; } } std::thread *threads = new std::thread[(short)outputDim[0]]; // every line will be beamformed in a seperate thread if (m_Conf.Algorithm == BeamformingSettings::BeamformingAlgorithm::DAS) { if (m_Conf.DelayCalculationMethod == BeamformingSettings::DelayCalc::QuadApprox) { for (short line = 0; line < outputDim[0]; ++line) { - threads[line] = std::thread(&BeamformingFilter::DASQuadraticLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, apodArraySize); + threads[line] = std::thread(&BeamformingFilter::DASQuadraticLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, m_Conf.apodizationArraySize); } } else if (m_Conf.DelayCalculationMethod == BeamformingSettings::DelayCalc::Spherical) { for (short line = 0; line < outputDim[0]; ++line) { - threads[line] = 
std::thread(&BeamformingFilter::DASSphericalLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, apodArraySize); + threads[line] = std::thread(&BeamformingFilter::DASSphericalLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, m_Conf.apodizationArraySize); } } } else if (m_Conf.Algorithm == BeamformingSettings::BeamformingAlgorithm::DMAS) { if (m_Conf.DelayCalculationMethod == BeamformingSettings::DelayCalc::QuadApprox) { for (short line = 0; line < outputDim[0]; ++line) { - threads[line] = std::thread(&BeamformingFilter::DMASQuadraticLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, apodArraySize); + threads[line] = std::thread(&BeamformingFilter::DMASQuadraticLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, m_Conf.apodizationArraySize); } } else if (m_Conf.DelayCalculationMethod == BeamformingSettings::DelayCalc::Spherical) { for (short line = 0; line < outputDim[0]; ++line) { - threads[line] = std::thread(&BeamformingFilter::DMASSphericalLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, apodArraySize); + threads[line] = std::thread(&BeamformingFilter::DMASSphericalLine, this, m_InputData, m_OutputData, inputDim, outputDim, line, ApodWindow, m_Conf.apodizationArraySize); } } } // wait for all lines to finish for (short line = 0; line < outputDim[0]; ++line) { threads[line].join(); } output->SetSlice(m_OutputData, i); if (i % progInterval == 0) m_ProgressHandle((int)((i + 1) / (float)output->GetDimension(2) * 100), "performing reconstruction"); delete[] m_OutputData; m_OutputData = nullptr; m_InputData = nullptr; } } - #ifdef PHOTOACOUSTICS_USE_GPU +#ifdef PHOTOACOUSTICS_USE_GPU else { try { // first, we check whether the data is float, other formats are unsupported if (!(input->GetPixelType().GetTypeAsString() == "scalar (float)" || input->GetPixelType().GetTypeAsString() == " (float)")) { MITK_ERROR << "Pixel type is not float, abort"; return; } m_ProgressHandle(50, "performing reconstruction"); - m_BeamformingOclFilter->SetApodisation(ApodWindow, apodArraySize); + m_BeamformingOclFilter->SetApodisation(ApodWindow, m_Conf.apodizationArraySize); m_BeamformingOclFilter->SetConfig(m_Conf); m_BeamformingOclFilter->SetInput(input); m_BeamformingOclFilter->Update(); void* out = m_BeamformingOclFilter->GetOutput(); output->SetImportVolume(out, 0, 0, mitk::Image::ReferenceMemory); } catch (mitk::Exception &e) { std::string errorMessage = "Caught unexpected exception "; errorMessage.append(e.what()); MITK_ERROR << errorMessage; + + float* dummyData = new float[m_Conf.ReconstructionLines * m_Conf.SamplesPerLine * m_Conf.inputDim[2]]; + output->SetImportVolume(dummyData, 0, 0, mitk::Image::ImportMemoryManagementType::ManageMemory); + + m_Message = "An openCL error occurred; all GPU operations in this and the next session may be corrupted."; } } - #endif +#endif m_TimeOfHeaderInitialization.Modified(); auto end = std::chrono::high_resolution_clock::now(); MITK_INFO << "Beamforming of " << output->GetDimension(2) << " Images completed in " << ((float)std::chrono::duration_cast(end - begin).count()) / 1000000 << "ms" << std::endl; } -void mitk::BeamformingFilter::Configure(BeamformingSettings settings) -{ - m_Conf = settings; -} - float* mitk::BeamformingFilter::VonHannFunction(int samples) { float* ApodWindow = new float[samples]; for (int n = 0; n < samples; ++n) { ApodWindow[n] = (1 - cos(2 * M_PI * n / (samples - 1))) / 2; } return ApodWindow; } float* 
mitk::BeamformingFilter::HammFunction(int samples) { float* ApodWindow = new float[samples]; for (int n = 0; n < samples; ++n) { ApodWindow[n] = 0.54 - 0.46*cos(2 * M_PI*n / (samples - 1)); } return ApodWindow; } float* mitk::BeamformingFilter::BoxFunction(int samples) { float* ApodWindow = new float[samples]; for (int n = 0; n < samples; ++n) { ApodWindow[n] = 1; } return ApodWindow; } void mitk::BeamformingFilter::DASQuadraticLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize) { float& inputS = inputDim[1]; float& inputL = inputDim[0]; float& outputS = outputDim[1]; float& outputL = outputDim[0]; short AddSample = 0; short maxLine = 0; short minLine = 0; float delayMultiplicator = 0; float l_i = 0; float s_i = 0; float part = 0.07 * inputL; float tan_phi = std::tan(m_Conf.Angle / 360 * 2 * M_PI); float part_multiplicator = tan_phi * m_Conf.TimeSpacing * m_Conf.SpeedOfSound / m_Conf.Pitch * inputL / m_Conf.TransducerElements; float apod_mult = 1; short usedLines = (maxLine - minLine); //quadratic delay l_i = line / outputL * inputL; for (short sample = 0; sample < outputS; ++sample) { s_i = (float)sample / outputS * inputS / 2; part = part_multiplicator*s_i; if (part < 1) part = 1; maxLine = (short)std::min((l_i + part) + 1, inputL); minLine = (short)std::max((l_i - part), 0.0f); usedLines = (maxLine - minLine); apod_mult = (float)apodArraySize / (float)usedLines; delayMultiplicator = pow((1 / (m_Conf.TimeSpacing*m_Conf.SpeedOfSound) * (m_Conf.Pitch*m_Conf.TransducerElements) / inputL), 2) / s_i / 2; for (short l_s = minLine; l_s < maxLine; ++l_s) { AddSample = delayMultiplicator * pow((l_s - l_i), 2) + s_i + (1 - m_Conf.isPhotoacousticImage)*s_i; if (AddSample < inputS && AddSample >= 0) output[sample*(short)outputL + line] += input[l_s + AddSample*(short)inputL] * apodisation[(short)((l_s - minLine)*apod_mult)]; else --usedLines; } output[sample*(short)outputL + line] = output[sample*(short)outputL + line] / usedLines; } } void mitk::BeamformingFilter::DASSphericalLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize) { float& inputS = inputDim[1]; float& inputL = inputDim[0]; float& outputS = outputDim[1]; float& outputL = outputDim[0]; short AddSample = 0; short maxLine = 0; short minLine = 0; float l_i = 0; float s_i = 0; float part = 0.07 * inputL; float tan_phi = std::tan(m_Conf.Angle / 360 * 2 * M_PI); float part_multiplicator = tan_phi * m_Conf.TimeSpacing * m_Conf.SpeedOfSound / m_Conf.Pitch * inputL / m_Conf.TransducerElements; float apod_mult = 1; short usedLines = (maxLine - minLine); //exact delay l_i = (float)line / outputL * inputL; for (short sample = 0; sample < outputS; ++sample) { s_i = (float)sample / outputS * inputS / 2; part = part_multiplicator*s_i; if (part < 1) part = 1; maxLine = (short)std::min((l_i + part) + 1, inputL); minLine = (short)std::max((l_i - part), 0.0f); usedLines = (maxLine - minLine); apod_mult = (float)apodArraySize / (float)usedLines; for (short l_s = minLine; l_s < maxLine; ++l_s) { AddSample = (int)sqrt( pow(s_i, 2) + pow((1 / (m_Conf.TimeSpacing*m_Conf.SpeedOfSound) * ((l_s - l_i)*m_Conf.Pitch*m_Conf.TransducerElements) / inputL), 2) ) + (1 - m_Conf.isPhotoacousticImage)*s_i; if (AddSample < inputS && AddSample >= 0) output[sample*(short)outputL + line] += input[l_s + AddSample*(short)inputL] * apodisation[(short)((l_s - minLine)*apod_mult)]; else --usedLines; } 
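// ---- [Editor's sketch, not part of the patch] --------------------------------
// The DAS kernels in this file differ only in how the read-out sample (the delay)
// is computed for a channel at lateral offset (l_s - l_i) and depth sample s_i,
// where k collects pitch, transducer element count, speed of sound, time spacing
// and the input line count:
//   spherical (exact):        delay = sqrt(s_i^2 + (k * (l_s - l_i))^2)
//   quadratic approximation:  delay ~= s_i + (k * (l_s - l_i))^2 / (2 * s_i)
// i.e. the quadratic form is the Taylor expansion of the square root, trading one
// sqrt per channel and sample for a small error far off-axis. Hedged sketch (the
// additional (1 - isPhotoacousticImage)*s_i round-trip term is omitted):
#include <cmath>

inline float SphericalDelay(float s_i, float elementOffset, float k)
{
  return std::sqrt(s_i * s_i + (k * elementOffset) * (k * elementOffset));
}

inline float QuadraticDelay(float s_i, float elementOffset, float k) // assumes s_i > 0
{
  return s_i + (k * elementOffset) * (k * elementOffset) / (2.0f * s_i);
}

// The DMAS variants that follow combine the delayed samples pairwise instead of
// summing them: every product of two channels is square-rooted with its sign
// preserved and then accumulated. Scalar sketch for one output pixel (apodization
// weights and bounds checks omitted for brevity):
#include <cstddef>
#include <vector>

inline float DmasPixel(const std::vector<float>& delayedSamples)
{
  float sum = 0.0f;
  for (std::size_t i = 0; i + 1 < delayedSamples.size(); ++i)
    for (std::size_t j = i + 1; j < delayedSamples.size(); ++j)
    {
      float product = delayedSamples[i] * delayedSamples[j];
      float sign = (product > 0) - (product < 0); // -1, 0 or +1
      sum += std::sqrt(std::fabs(product)) * sign;
    }
  return sum;
}
// -------------------------------------------------------------------------------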
output[sample*(short)outputL + line] = output[sample*(short)outputL + line] / usedLines; } } void mitk::BeamformingFilter::DMASQuadraticLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize) { float& inputS = inputDim[1]; float& inputL = inputDim[0]; float& outputS = outputDim[1]; float& outputL = outputDim[0]; short maxLine = 0; short minLine = 0; float delayMultiplicator = 0; float l_i = 0; float s_i = 0; float part = 0.07 * inputL; float tan_phi = std::tan(m_Conf.Angle / 360 * 2 * M_PI); float part_multiplicator = tan_phi * m_Conf.TimeSpacing * m_Conf.SpeedOfSound / m_Conf.Pitch * inputL / m_Conf.TransducerElements; float apod_mult = 1; float mult = 0; short usedLines = (maxLine - minLine); //quadratic delay l_i = line / outputL * inputL; for (short sample = 0; sample < outputS; ++sample) { s_i = sample / outputS * inputS / 2; part = part_multiplicator*s_i; if (part < 1) part = 1; maxLine = (short)std::min((l_i + part) + 1, inputL); minLine = (short)std::max((l_i - part), 0.0f); usedLines = (maxLine - minLine); apod_mult = (float)apodArraySize / (float)usedLines; delayMultiplicator = pow((1 / (m_Conf.TimeSpacing*m_Conf.SpeedOfSound) * (m_Conf.Pitch*m_Conf.TransducerElements) / inputL), 2) / s_i / 2; //calculate the AddSamples beforehand to save some time short* AddSample = new short[maxLine - minLine]; for (short l_s = 0; l_s < maxLine - minLine; ++l_s) { AddSample[l_s] = (short)(delayMultiplicator * pow((minLine + l_s - l_i), 2) + s_i) + (1 - m_Conf.isPhotoacousticImage)*s_i; } for (short l_s1 = minLine; l_s1 < maxLine - 1; ++l_s1) { if (AddSample[l_s1 - minLine] < (short)inputS && AddSample[l_s1 - minLine] >= 0) { for (short l_s2 = l_s1 + 1; l_s2 < maxLine; ++l_s2) { if (AddSample[l_s2 - minLine] < inputS && AddSample[l_s2 - minLine] >= 0) { mult = input[l_s2 + AddSample[l_s2 - minLine] * (short)inputL] * apodisation[(short)((l_s2 - minLine)*apod_mult)] * input[l_s1 + AddSample[l_s1 - minLine] * (short)inputL] * apodisation[(short)((l_s1 - minLine)*apod_mult)]; output[sample*(short)outputL + line] += sqrt(abs(mult)) * ((mult > 0) - (mult < 0)); } } } else --usedLines; } output[sample*(short)outputL + line] = output[sample*(short)outputL + line] / (pow(usedLines, 2) - (usedLines - 1)); delete[] AddSample; } } void mitk::BeamformingFilter::DMASSphericalLine(float* input, float* output, float inputDim[2], float outputDim[2], const short& line, float* apodisation, const short& apodArraySize) { float& inputS = inputDim[1]; float& inputL = inputDim[0]; float& outputS = outputDim[1]; float& outputL = outputDim[0]; short maxLine = 0; short minLine = 0; float l_i = 0; float s_i = 0; float part = 0.07 * inputL; float tan_phi = std::tan(m_Conf.Angle / 360 * 2 * M_PI); float part_multiplicator = tan_phi * m_Conf.TimeSpacing * m_Conf.SpeedOfSound / m_Conf.Pitch * inputL / m_Conf.TransducerElements; float apod_mult = 1; float mult = 0; short usedLines = (maxLine - minLine); //exact delay l_i = line / outputL * inputL; for (short sample = 0; sample < outputS; ++sample) { s_i = sample / outputS * inputS / 2; part = part_multiplicator*s_i; if (part < 1) part = 1; maxLine = (short)std::min((l_i + part) + 1, inputL); minLine = (short)std::max((l_i - part), 0.0f); usedLines = (maxLine - minLine); apod_mult = (float)apodArraySize / (float)usedLines; //calculate the AddSamples beforehand to save some time short* AddSample = new short[maxLine - minLine]; for (short l_s = 0; l_s < maxLine - minLine; ++l_s) { AddSample[l_s] = 
(short)sqrt( pow(s_i, 2) + pow((1 / (m_Conf.TimeSpacing*m_Conf.SpeedOfSound) * ((minLine + l_s - l_i)*m_Conf.Pitch*m_Conf.TransducerElements) / inputL), 2) ) + (1 - m_Conf.isPhotoacousticImage)*s_i; } for (short l_s1 = minLine; l_s1 < maxLine - 1; ++l_s1) { if (AddSample[l_s1 - minLine] < inputS && AddSample[l_s1 - minLine] >= 0) { for (short l_s2 = l_s1 + 1; l_s2 < maxLine; ++l_s2) { if (AddSample[l_s2 - minLine] < inputS && AddSample[l_s2 - minLine] >= 0) { mult = input[l_s2 + AddSample[l_s2 - minLine] * (short)inputL] * apodisation[(int)((l_s2 - minLine)*apod_mult)] * input[l_s1 + AddSample[l_s1 - minLine] * (short)inputL] * apodisation[(int)((l_s1 - minLine)*apod_mult)]; output[sample*(short)outputL + line] += sqrt(abs(mult)) * ((mult > 0) - (mult < 0)); } } } else --usedLines; } output[sample*(short)outputL + line] = output[sample*(short)outputL + line] / (float)(pow(usedLines, 2) - (usedLines - 1)); delete[] AddSample; } } diff --git a/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticImage.cpp b/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticImage.cpp index a838dad9fc..dd97e5f6fd 100644 --- a/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticImage.cpp +++ b/Modules/PhotoacousticsAlgorithms/source/mitkPhotoacousticImage.cpp @@ -1,521 +1,522 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #define _USE_MATH_DEFINES #include #include "mitkPhotoacousticImage.h" #include "../ITKFilter/ITKUltrasound/itkBModeImageFilter.h" #include "../ITKFilter/itkPhotoacousticBModeImageFilter.h" #include "mitkImageCast.h" #include "mitkITKImageImport.h" #include "mitkPhotoacousticBeamformingFilter.h" #include #include #include "./OpenCLFilter/mitkPhotoacousticBModeFilter.h" // itk dependencies #include "itkImage.h" #include "itkResampleImageFilter.h" #include "itkCastImageFilter.h" #include "itkCropImageFilter.h" #include "itkRescaleIntensityImageFilter.h" #include "itkIntensityWindowingImageFilter.h" #include #include "itkMultiplyImageFilter.h" #include "itkBSplineInterpolateImageFunction.h" #include // needed itk image filters #include "mitkITKImageImport.h" #include "itkFFTShiftImageFilter.h" #include "itkMultiplyImageFilter.h" #include "itkComplexToModulusImageFilter.h" #include #include "../ITKFilter/ITKUltrasound/itkFFT1DComplexConjugateToRealImageFilter.h" #include "../ITKFilter/ITKUltrasound/itkFFT1DRealToComplexConjugateImageFilter.h" mitk::PhotoacousticImage::PhotoacousticImage() : m_BeamformingFilter(BeamformingFilter::New()) { MITK_INFO << "[PhotoacousticImage Debug] created that image"; } mitk::PhotoacousticImage::~PhotoacousticImage() { MITK_INFO << "[PhotoacousticImage Debug] destroyed that image"; } mitk::Image::Pointer mitk::PhotoacousticImage::ApplyBmodeFilter(mitk::Image::Pointer inputImage, BModeMethod method, bool UseGPU, bool UseLogFilter, float resampleSpacing) { // the image needs to be of floating point type for the envelope filter to work; the casting is done automatically by the CastToItkImage typedef itk::Image< float, 3 > itkFloatImageType; typedef itk::IdentityTransform TransformType; if (method == 
BModeMethod::Abs) { mitk::Image::Pointer input; mitk::Image::Pointer out; if (inputImage->GetPixelType().GetTypeAsString() == "scalar (float)" || inputImage->GetPixelType().GetTypeAsString() == " (float)") input = inputImage; else input = ApplyCropping(inputImage, 0, 0, 0, 0, 0, inputImage->GetDimension(2) - 1); if (!UseGPU) { PhotoacousticBModeFilter::Pointer filter = PhotoacousticBModeFilter::New(); filter->SetParameters(UseLogFilter); filter->SetInput(input); filter->Update(); out = filter->GetOutput(); if (resampleSpacing == 0) return out; } #ifdef PHOTOACOUSTICS_USE_GPU else { PhotoacousticOCLBModeFilter::Pointer filter = PhotoacousticOCLBModeFilter::New(); filter->SetParameters(UseLogFilter); filter->SetInput(input); filter->Update(); out = filter->GetOutput(); if (resampleSpacing == 0) return out; } #endif typedef itk::ResampleImageFilter < itkFloatImageType, itkFloatImageType > ResampleImageFilter; ResampleImageFilter::Pointer resampleImageFilter = ResampleImageFilter::New(); itkFloatImageType::Pointer itkImage; mitk::CastToItkImage(out, itkImage); itkFloatImageType::SpacingType outputSpacing; itkFloatImageType::SizeType inputSize = itkImage->GetLargestPossibleRegion().GetSize(); itkFloatImageType::SizeType outputSize = inputSize; outputSpacing[0] = itkImage->GetSpacing()[0]; outputSpacing[1] = resampleSpacing; outputSpacing[2] = itkImage->GetSpacing()[2]; outputSize[1] = inputSize[1] * itkImage->GetSpacing()[1] / outputSpacing[1]; typedef itk::IdentityTransform TransformType; resampleImageFilter->SetInput(itkImage); resampleImageFilter->SetSize(outputSize); resampleImageFilter->SetOutputSpacing(outputSpacing); resampleImageFilter->SetTransform(TransformType::New()); resampleImageFilter->UpdateLargestPossibleRegion(); return mitk::GrabItkImageMemory(resampleImageFilter->GetOutput()); } else if (method == BModeMethod::ShapeDetection) { typedef itk::BModeImageFilter < itkFloatImageType, itkFloatImageType > BModeFilterType; BModeFilterType::Pointer bModeFilter = BModeFilterType::New(); // LogFilter typedef itk::PhotoacousticBModeImageFilter < itkFloatImageType, itkFloatImageType > PhotoacousticBModeImageFilter; PhotoacousticBModeImageFilter::Pointer photoacousticBModeFilter = PhotoacousticBModeImageFilter::New(); // No LogFilter typedef itk::ResampleImageFilter < itkFloatImageType, itkFloatImageType > ResampleImageFilter; ResampleImageFilter::Pointer resampleImageFilter = ResampleImageFilter::New(); itkFloatImageType::Pointer itkImage; mitk::CastToItkImage(inputImage, itkImage); itkFloatImageType::Pointer bmode; if (UseLogFilter) { bModeFilter->SetInput(itkImage); bModeFilter->SetDirection(1); bmode = bModeFilter->GetOutput(); } else { photoacousticBModeFilter->SetInput(itkImage); photoacousticBModeFilter->SetDirection(1); bmode = photoacousticBModeFilter->GetOutput(); } // resampleSpacing == 0 means: do no resampling if (resampleSpacing == 0) { return mitk::GrabItkImageMemory(bmode); } itkFloatImageType::SpacingType outputSpacing; itkFloatImageType::SizeType inputSize = itkImage->GetLargestPossibleRegion().GetSize(); itkFloatImageType::SizeType outputSize = inputSize; outputSpacing[0] = itkImage->GetSpacing()[0]; outputSpacing[1] = resampleSpacing; outputSpacing[2] = itkImage->GetSpacing()[2]; outputSize[1] = inputSize[1] * itkImage->GetSpacing()[1] / outputSpacing[1]; resampleImageFilter->SetInput(bmode); resampleImageFilter->SetSize(outputSize); resampleImageFilter->SetOutputSpacing(outputSpacing); resampleImageFilter->SetTransform(TransformType::New()); 
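// ---- [Editor's sketch, not part of the patch] --------------------------------
// Both resampling branches of ApplyBmodeFilter keep the physical extent of the
// y-axis constant: only the spacing changes and the output size is derived as
//   outputSize = inputSize * inputSpacing / outputSpacing.
// Minimal illustrative helper (not ITK/MITK API):
#include <cstddef>

inline std::size_t ResampledSize(std::size_t inputSize, double inputSpacing, double outputSpacing)
{
  // inputSize * inputSpacing == outputSize * outputSpacing  (same physical length)
  return static_cast<std::size_t>(inputSize * inputSpacing / outputSpacing);
}
// Example: 2048 samples at 0.01 mm spacing resampled to 0.15 mm -> 136 samples.
// -------------------------------------------------------------------------------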
resampleImageFilter->UpdateLargestPossibleRegion(); return mitk::GrabItkImageMemory(resampleImageFilter->GetOutput()); } return nullptr; } /*mitk::Image::Pointer mitk::PhotoacousticImage::ApplyScatteringCompensation(mitk::Image::Pointer inputImage, int scattering) { typedef itk::Image< float, 3 > itkFloatImageType; typedef itk::MultiplyImageFilter MultiplyImageFilterType; itkFloatImageType::Pointer itkImage; mitk::CastToItkImage(inputImage, itkImage); MultiplyImageFilterType::Pointer multiplyFilter = MultiplyImageFilterType::New(); multiplyFilter->SetInput1(itkImage); multiplyFilter->SetInput2(m_FluenceCompResizedItk.at(m_ScatteringCoefficient)); return mitk::GrabItkImageMemory(multiplyFilter->GetOutput()); }*/ mitk::Image::Pointer mitk::PhotoacousticImage::ApplyResampling(mitk::Image::Pointer inputImage, unsigned int outputSize[2]) { typedef itk::Image< float, 3 > itkFloatImageType; typedef itk::ResampleImageFilter < itkFloatImageType, itkFloatImageType > ResampleImageFilter; ResampleImageFilter::Pointer resampleImageFilter = ResampleImageFilter::New(); typedef itk::LinearInterpolateImageFunction T_Interpolator; itkFloatImageType::Pointer itkImage; mitk::CastToItkImage(inputImage, itkImage); itkFloatImageType::SpacingType outputSpacingItk; itkFloatImageType::SizeType inputSizeItk = itkImage->GetLargestPossibleRegion().GetSize(); itkFloatImageType::SizeType outputSizeItk = inputSizeItk; outputSizeItk[0] = outputSize[0]; outputSizeItk[1] = outputSize[1]; outputSizeItk[2] = inputSizeItk[2]; outputSpacingItk[0] = itkImage->GetSpacing()[0] * (static_cast(inputSizeItk[0]) / static_cast(outputSizeItk[0])); outputSpacingItk[1] = itkImage->GetSpacing()[1] * (static_cast(inputSizeItk[1]) / static_cast(outputSizeItk[1])); outputSpacingItk[2] = itkImage->GetSpacing()[2]; typedef itk::IdentityTransform TransformType; T_Interpolator::Pointer _pInterpolator = T_Interpolator::New(); resampleImageFilter->SetInput(itkImage); resampleImageFilter->SetSize(outputSizeItk); resampleImageFilter->SetOutputSpacing(outputSpacingItk); resampleImageFilter->SetTransform(TransformType::New()); resampleImageFilter->SetInterpolator(_pInterpolator); resampleImageFilter->UpdateLargestPossibleRegion(); return mitk::GrabItkImageMemory(resampleImageFilter->GetOutput()); } mitk::Image::Pointer mitk::PhotoacousticImage::ApplyCropping(mitk::Image::Pointer inputImage, int above, int below, int right, int left, int minSlice, int maxSlice) { unsigned int inputDim[3] = { inputImage->GetDimension(0), inputImage->GetDimension(1), inputImage->GetDimension(2) }; unsigned int outputDim[3] = { inputImage->GetDimension(0) - left - right, inputImage->GetDimension(1) - (unsigned int)above - (unsigned int)below, (unsigned int)maxSlice - (unsigned int)minSlice + 1 }; void* inputData; float* outputData = new float[outputDim[0] * outputDim[1] * outputDim[2]]; ImageReadAccessor acc(inputImage); inputData = const_cast(acc.GetData()); // convert the data to float by default // as of now only those float, short, float are used at all... 
though it's easy to add other ones if (inputImage->GetPixelType().GetTypeAsString() == "scalar (float)" || inputImage->GetPixelType().GetTypeAsString() == " (float)") { // copy the data into the cropped image for (unsigned short sl = 0; sl < outputDim[2]; ++sl) { for (unsigned short l = 0; l < outputDim[0]; ++l) { for (unsigned short s = 0; s < outputDim[1]; ++s) { outputData[l + s*(unsigned short)outputDim[0] + sl*outputDim[0] * outputDim[1]] = (float)((float*)inputData)[(l + left) + (s + above)*(unsigned short)inputDim[0] + (sl + minSlice)*inputDim[0] * inputDim[1]]; } } } } else if (inputImage->GetPixelType().GetTypeAsString() == "scalar (short)" || inputImage->GetPixelType().GetTypeAsString() == " (short)") { // copy the data unsigned shorto the cropped image for (unsigned short sl = 0; sl < outputDim[2]; ++sl) { for (unsigned short l = 0; l < outputDim[0]; ++l) { for (unsigned short s = 0; s < outputDim[1]; ++s) { outputData[l + s*(unsigned short)outputDim[0] + sl*outputDim[0] * outputDim[1]] = (float)((short*)inputData)[(l + left) + (s + above)*(unsigned short)inputDim[0] + (sl + minSlice)*inputDim[0] * inputDim[1]]; } } } } else if (inputImage->GetPixelType().GetTypeAsString() == "scalar (double)" || inputImage->GetPixelType().GetTypeAsString() == " (double)") { // copy the data unsigned shorto the cropped image for (unsigned short sl = 0; sl < outputDim[2]; ++sl) { for (unsigned short l = 0; l < outputDim[0]; ++l) { for (unsigned short s = 0; s < outputDim[1]; ++s) { outputData[l + s*(unsigned short)outputDim[0] + sl*outputDim[0] * outputDim[1]] = (float)((double*)inputData)[(l + left) + (s + above)*(unsigned short)inputDim[0] + (sl + minSlice)*inputDim[0] * inputDim[1]]; } } } } else { MITK_INFO << "Could not determine pixel type"; } mitk::Image::Pointer output = mitk::Image::New(); output->Initialize(mitk::MakeScalarPixelType(), 3, outputDim); output->SetSpacing(inputImage->GetGeometry()->GetSpacing()); output->SetImportVolume(outputData, 0, 0, mitk::Image::ReferenceMemory); return output; } -mitk::Image::Pointer mitk::PhotoacousticImage::ApplyBeamforming(mitk::Image::Pointer inputImage, BeamformingSettings config, std::function progressHandle) +mitk::Image::Pointer mitk::PhotoacousticImage::ApplyBeamforming(mitk::Image::Pointer inputImage, BeamformingSettings config, std::string& message, std::function progressHandle) { config.RecordTime = config.RecordTime - (float)(config.upperCutoff) / (float)inputImage->GetDimension(1) * config.RecordTime; // adjust the recorded time lost by cropping progressHandle(0, "cropping image"); if (!config.partial) { config.CropBounds[0] = 0; config.CropBounds[1] = inputImage->GetDimension(2) - 1; } Image::Pointer processedImage = ApplyCropping(inputImage, config.upperCutoff, 0, 0, 0, config.CropBounds[0], config.CropBounds[1]); config.inputDim[0] = processedImage->GetDimension(0); config.inputDim[1] = processedImage->GetDimension(1); config.inputDim[2] = processedImage->GetDimension(2); // perform the beamforming m_BeamformingFilter->SetInput(processedImage); m_BeamformingFilter->Configure(config); m_BeamformingFilter->SetProgressHandle(progressHandle); m_BeamformingFilter->UpdateLargestPossibleRegion(); processedImage = m_BeamformingFilter->GetOutput(); + message = m_BeamformingFilter->GetMessageString(); return processedImage; } mitk::Image::Pointer mitk::PhotoacousticImage::BandpassFilter(mitk::Image::Pointer data, float recordTime, float BPHighPass, float BPLowPass, float alpha) { bool powerOfTwo = false; int finalPower = 0; for (int i = 1; 
pow(2, i) <= data->GetDimension(1); ++i) { finalPower = i; if (pow(2, i) == data->GetDimension(1)) { powerOfTwo = true; } } if (!powerOfTwo) { unsigned int dim[2] = { data->GetDimension(0), (unsigned int)pow(2,finalPower+1)}; data = ApplyResampling(data, dim); } MITK_INFO << data->GetDimension(0); // do a fourier transform, multiply with an appropriate window for the filter, and transform back typedef float PixelType; typedef itk::Image< PixelType, 3 > RealImageType; RealImageType::Pointer image; mitk::CastToItkImage(data, image); typedef itk::FFT1DRealToComplexConjugateImageFilter ForwardFFTFilterType; typedef ForwardFFTFilterType::OutputImageType ComplexImageType; ForwardFFTFilterType::Pointer forwardFFTFilter = ForwardFFTFilterType::New(); forwardFFTFilter->SetInput(image); forwardFFTFilter->SetDirection(1); try { forwardFFTFilter->UpdateOutputInformation(); } catch (itk::ExceptionObject & error) { std::cerr << "Error: " << error << std::endl; MITK_WARN << "Bandpass could not be applied"; return data; } float singleVoxel = 1 / (recordTime / data->GetDimension(1)) / 2 / 1000; float cutoffPixelHighPass = std::min(BPHighPass / singleVoxel, (float)data->GetDimension(1) / 2); float cutoffPixelLowPass = std::min(BPLowPass / singleVoxel, (float)data->GetDimension(1) / 2 - cutoffPixelHighPass); RealImageType::Pointer fftMultiplicator = BPFunction(data, cutoffPixelHighPass, cutoffPixelLowPass, alpha); typedef itk::MultiplyImageFilter< ComplexImageType, RealImageType, ComplexImageType > MultiplyFilterType; MultiplyFilterType::Pointer multiplyFilter = MultiplyFilterType::New(); multiplyFilter->SetInput1(forwardFFTFilter->GetOutput()); multiplyFilter->SetInput2(fftMultiplicator); /*itk::ComplexToModulusImageFilter::Pointer toReal = itk::ComplexToModulusImageFilter::New(); toReal->SetInput(forwardFFTFilter->GetOutput()); return GrabItkImageMemory(toReal->GetOutput()); return GrabItkImageMemory(fftMultiplicator); *///DEBUG typedef itk::FFT1DComplexConjugateToRealImageFilter< ComplexImageType, RealImageType > InverseFilterType; InverseFilterType::Pointer inverseFFTFilter = InverseFilterType::New(); inverseFFTFilter->SetInput(multiplyFilter->GetOutput()); inverseFFTFilter->SetDirection(1); return GrabItkImageMemory(inverseFFTFilter->GetOutput()); } itk::Image::Pointer mitk::PhotoacousticImage::BPFunction(mitk::Image::Pointer reference, int cutoffFrequencyPixelHighPass, int cutoffFrequencyPixelLowPass, float alpha) { float* imageData = new float[reference->GetDimension(0)*reference->GetDimension(1)]; // tukey window float width = reference->GetDimension(1) / 2 - (float)cutoffFrequencyPixelHighPass - (float)cutoffFrequencyPixelLowPass; float center = (float)cutoffFrequencyPixelHighPass / 2 + width / 2; MITK_INFO << width << "width " << center << "center " << alpha; for (unsigned int n = 0; n < reference->GetDimension(1); ++n) { imageData[reference->GetDimension(0)*n] = 0; } for (int n = 0; n < width; ++n) { if (n <= (alpha*(width - 1)) / 2) { imageData[reference->GetDimension(0)*(int)(n + center - (width / 2))] = (1 + cos(M_PI*(2 * n / (alpha*(width - 1)) - 1))) / 2; } else if (n >= (width - 1)*(1 - alpha / 2) && n <= (width - 1)) { imageData[reference->GetDimension(0)*(int)(n + center - (width / 2))] = (1 + cos(M_PI*(2 * n / (alpha*(width - 1)) + 1 - 2 / alpha))) / 2; } else { imageData[reference->GetDimension(0)*(int)(n + center - (width / 2))] = 1; } } // Butterworth-Filter /* // first, write the HighPass if (cutoffFrequencyPixelHighPass != reference->GetDimension(1) / 2) { for (int n = 0; n < 
reference->GetDimension(1) / 2; ++n) { imageData[reference->GetDimension(0)*n] = 1 / (1 + pow( (float)n / (float)(reference->GetDimension(1) / 2 - cutoffFrequencyPixelHighPass) , 2 * butterworthOrder)); } } else { for (int n = 0; n < reference->GetDimension(1) / 2; ++n) { imageData[reference->GetDimension(0)*n] = 1; } } // now, the LowPass for (int n = 0; n < reference->GetDimension(1) / 2; ++n) { imageData[reference->GetDimension(0)*n] *= 1 / (1 + pow( (float)(reference->GetDimension(1) / 2 - 1 - n) / (float)(reference->GetDimension(1) / 2 - cutoffFrequencyPixelLowPass) , 2 * butterworthOrder)); } */ // mirror the first half of the image for (unsigned int n = reference->GetDimension(1) / 2; n < reference->GetDimension(1); ++n) { imageData[reference->GetDimension(0)*n] = imageData[(reference->GetDimension(1) - (n + 1)) * reference->GetDimension(0)]; } // copy and paste to all lines for (unsigned int line = 1; line < reference->GetDimension(0); ++line) { for (unsigned int sample = 0; sample < reference->GetDimension(1); ++sample) { imageData[reference->GetDimension(0)*sample + line] = imageData[reference->GetDimension(0)*sample]; } } typedef itk::Image< float, 3U > ImageType; ImageType::RegionType region; ImageType::IndexType start; start.Fill(0); region.SetIndex(start); ImageType::SizeType size; size[0] = reference->GetDimension(0); size[1] = reference->GetDimension(1); size[2] = reference->GetDimension(2); region.SetSize(size); ImageType::SpacingType SpacingItk; SpacingItk[0] = reference->GetGeometry()->GetSpacing()[0]; SpacingItk[1] = reference->GetGeometry()->GetSpacing()[1]; SpacingItk[2] = reference->GetGeometry()->GetSpacing()[2]; ImageType::Pointer image = ImageType::New(); image->SetRegions(region); image->Allocate(); image->FillBuffer(itk::NumericTraits::Zero); image->SetSpacing(SpacingItk); ImageType::IndexType pixelIndex; for (ImageType::IndexValueType slice = 0; slice < reference->GetDimension(2); ++slice) { for (ImageType::IndexValueType line = 0; line < reference->GetDimension(0); ++line) { for (ImageType::IndexValueType sample = 0; sample < reference->GetDimension(1); ++sample) { pixelIndex[0] = line; pixelIndex[1] = sample; pixelIndex[2] = slice; image->SetPixel(pixelIndex, imageData[line + sample*reference->GetDimension(0)]); } } } delete[] imageData; return image; } \ No newline at end of file diff --git a/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.cpp b/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.cpp index 33d9af950b..4bd7e2eed5 100644 --- a/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.cpp +++ b/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.cpp @@ -1,1115 +1,1150 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ // Blueberry #include #include // Qmitk #include "PAImageProcessing.h" // Qt #include #include #include #include //mitk image #include #include "mitkPhotoacousticImage.h" #include "mitkPhotoacousticBeamformingFilter.h" //other #include #include #include const std::string PAImageProcessing::VIEW_ID = "org.mitk.views.paimageprocessing"; PAImageProcessing::PAImageProcessing() : m_ResampleSpacing(0), m_UseLogfilter(false), m_FilterBank(mitk::PhotoacousticImage::New()) { qRegisterMetaType(); qRegisterMetaType(); } void PAImageProcessing::SetFocus() { m_Controls.buttonApplyBModeFilter->setFocus(); } void PAImageProcessing::CreateQtPartControl(QWidget *parent) { // create GUI widgets from the Qt Designer's .ui file m_Controls.setupUi(parent); connect(m_Controls.buttonApplyBModeFilter, SIGNAL(clicked()), this, SLOT(StartBmodeThread())); connect(m_Controls.DoResampling, SIGNAL(clicked()), this, SLOT(UseResampling())); connect(m_Controls.Logfilter, SIGNAL(clicked()), this, SLOT(UseLogfilter())); connect(m_Controls.ResamplingValue, SIGNAL(valueChanged(double)), this, SLOT(SetResampling())); connect(m_Controls.buttonApplyBeamforming, SIGNAL(clicked()), this, SLOT(StartBeamformingThread())); connect(m_Controls.buttonApplyCropFilter, SIGNAL(clicked()), this, SLOT(StartCropThread())); connect(m_Controls.buttonApplyBandpass, SIGNAL(clicked()), this, SLOT(StartBandpassThread())); connect(m_Controls.UseImageSpacing, SIGNAL(clicked()), this, SLOT(UseImageSpacing())); connect(m_Controls.ScanDepth, SIGNAL(valueChanged(double)), this, SLOT(UpdateImageInfo())); connect(m_Controls.SpeedOfSound, SIGNAL(valueChanged(double)), this, SLOT(UpdateImageInfo())); connect(m_Controls.SpeedOfSound, SIGNAL(valueChanged(double)), this, SLOT(ChangedSOSBeamforming())); connect(m_Controls.BPSpeedOfSound, SIGNAL(valueChanged(double)), this, SLOT(ChangedSOSBandpass())); connect(m_Controls.Samples, SIGNAL(valueChanged(int)), this, SLOT(UpdateImageInfo())); connect(m_Controls.UseImageSpacing, SIGNAL(clicked()), this, SLOT(UpdateImageInfo())); - connect(m_Controls.boundLow, SIGNAL(valueChanged(int)), this, SLOT(UpdateBounds())); - connect(m_Controls.boundHigh, SIGNAL(valueChanged(int)), this, SLOT(UpdateBounds())); - connect(m_Controls.Partial, SIGNAL(clicked()), this, SLOT(UpdateBounds())); + connect(m_Controls.boundLow, SIGNAL(valueChanged(int)), this, SLOT(LowerSliceBoundChanged())); + connect(m_Controls.boundHigh, SIGNAL(valueChanged(int)), this, SLOT(UpperSliceBoundChanged())); + connect(m_Controls.Partial, SIGNAL(clicked()), this, SLOT(SliceBoundsEnabled())); connect(m_Controls.BatchProcessing, SIGNAL(clicked()), this, SLOT(BatchProcessing())); connect(m_Controls.StepBeamforming, SIGNAL(clicked()), this, SLOT(UpdateSaveBoxes())); connect(m_Controls.StepCropping, SIGNAL(clicked()), this, SLOT(UpdateSaveBoxes())); connect(m_Controls.StepBandpass, SIGNAL(clicked()), this, SLOT(UpdateSaveBoxes())); connect(m_Controls.StepBMode, SIGNAL(clicked()), this, SLOT(UpdateSaveBoxes())); UpdateSaveBoxes(); m_Controls.DoResampling->setChecked(false); m_Controls.ResamplingValue->setEnabled(false); m_Controls.progressBar->setMinimum(0); m_Controls.progressBar->setMaximum(100); m_Controls.progressBar->setVisible(false); m_Controls.UseImageSpacing->setToolTip("Image spacing of y-Axis must be in us, x-Axis in mm."); m_Controls.UseImageSpacing->setToolTipDuration(5000); m_Controls.ProgressInfo->setVisible(false); m_Controls.UseBP->hide(); #ifndef PHOTOACOUSTICS_USE_GPU 
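// builds without OpenCL support (PHOTOACOUSTICS_USE_GPU not defined) grey out and uncheck the GPU options so that only the CPU filter paths remain selectable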
m_Controls.UseGPUBf->setEnabled(false); m_Controls.UseGPUBf->setChecked(false); m_Controls.UseGPUBmode->setEnabled(false); m_Controls.UseGPUBmode->setChecked(false); #endif UseImageSpacing(); } void PAImageProcessing::ChangedSOSBandpass() { m_Controls.SpeedOfSound->setValue(m_Controls.BPSpeedOfSound->value()); } void PAImageProcessing::ChangedSOSBeamforming() { m_Controls.BPSpeedOfSound->setValue(m_Controls.SpeedOfSound->value()); } std::vector splitpath( const std::string& str , const std::set delimiters) { std::vector result; char const* pch = str.c_str(); char const* start = pch; for (; *pch; ++pch) { if (delimiters.find(*pch) != delimiters.end()) { if (start != pch) { std::string str(start, pch); result.push_back(str); } else { result.push_back(""); } start = pch + 1; } } result.push_back(start); return result; } void PAImageProcessing::UpdateSaveBoxes() { if (m_Controls.StepBeamforming->isChecked()) m_Controls.SaveBeamforming->setEnabled(true); else m_Controls.SaveBeamforming->setEnabled(false); if (m_Controls.StepCropping->isChecked()) m_Controls.SaveCropping->setEnabled(true); else m_Controls.SaveCropping->setEnabled(false); if (m_Controls.StepBandpass->isChecked()) m_Controls.SaveBandpass->setEnabled(true); else m_Controls.SaveBandpass->setEnabled(false); if (m_Controls.StepBMode->isChecked()) m_Controls.SaveBMode->setEnabled(true); else m_Controls.SaveBMode->setEnabled(false); } void PAImageProcessing::BatchProcessing() { QFileDialog LoadDialog(nullptr, "Select Files to be processed"); LoadDialog.setFileMode(QFileDialog::FileMode::ExistingFiles); LoadDialog.setNameFilter(tr("Images (*.nrrd)")); LoadDialog.setViewMode(QFileDialog::Detail); QStringList fileNames; if (LoadDialog.exec()) fileNames = LoadDialog.selectedFiles(); QString saveDir = QFileDialog::getExistingDirectory(nullptr, tr("Select Directory To Save To"), "", QFileDialog::ShowDirsOnly | QFileDialog::DontResolveSymlinks); DisableControls(); std::set delims{'/'}; bool doSteps[] = { m_Controls.StepBeamforming->isChecked(), m_Controls.StepCropping->isChecked() , m_Controls.StepBandpass->isChecked(), m_Controls.StepBMode->isChecked() }; bool saveSteps[] = { m_Controls.SaveBeamforming->isChecked(), m_Controls.SaveCropping->isChecked() , m_Controls.SaveBandpass->isChecked(), m_Controls.SaveBMode->isChecked() }; for (int fileNumber = 0; fileNumber < fileNames.size(); ++fileNumber) { m_Controls.progressBar->setValue(0); m_Controls.progressBar->setVisible(true); m_Controls.ProgressInfo->setVisible(true); m_Controls.ProgressInfo->setText("loading file"); QString filename = fileNames.at(fileNumber); auto split = splitpath(filename.toStdString(), delims); std::string imageName = split.at(split.size()-1); // remove ".nrrd" imageName = imageName.substr(0, imageName.size()-5); mitk::Image::Pointer image = mitk::IOUtil::LoadImage(filename.toStdString().c_str()); UpdateBFSettings(image); // Beamforming if (doSteps[0]) { std::function progressHandle = [this](int progress, std::string progressInfo) { this->UpdateProgress(progress, progressInfo); }; m_Controls.progressBar->setValue(100); + std::string errorMessage = ""; - image = m_FilterBank->ApplyBeamforming(image, BFconfig, progressHandle); + image = m_FilterBank->ApplyBeamforming(image, BFconfig, errorMessage, progressHandle); if (saveSteps[0]) { std::string saveFileName = saveDir.toStdString() + "/" + imageName + " beamformed" + ".nrrd"; mitk::IOUtil::Save(image, saveFileName); } } // Cropping if (doSteps[1]) { m_Controls.ProgressInfo->setText("cropping image"); image = 
m_FilterBank->ApplyCropping(image, m_Controls.CutoffAbove->value(), m_Controls.CutoffBelow->value(), 0, 0, 0, image->GetDimension(2) - 1); if (saveSteps[1]) { std::string saveFileName = saveDir.toStdString() + "/" + imageName + " cropped" + ".nrrd"; mitk::IOUtil::Save(image, saveFileName); } } // Bandpass if (doSteps[2]) { m_Controls.ProgressInfo->setText("applying bandpass"); float recordTime = image->GetDimension(1)*image->GetGeometry()->GetSpacing()[1] / 1000 / m_Controls.BPSpeedOfSound->value(); // add a safeguard so the program does not crash when applying a Bandpass that reaches out of the bounds of the image float maxFrequency = 1 / (recordTime / image->GetDimension(1)) * image->GetDimension(1) / 2 / 2 / 1000; float BPHighPass = 1000000 * m_Controls.BPhigh->value(); // [Hz] float BPLowPass = maxFrequency - 1000000 * m_Controls.BPlow->value(); // [Hz] if (BPLowPass > maxFrequency && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("LowPass too low, disabled it."); Msgbox.exec(); BPLowPass = 0; } if (BPLowPass < 0 && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("LowPass too high, disabled it."); Msgbox.exec(); BPLowPass = 0; } if (BPHighPass > maxFrequency && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("HighPass too high, disabled it."); Msgbox.exec(); BPHighPass = 0; } if (BPHighPass > maxFrequency - BFconfig.BPLowPass) { QMessageBox Msgbox; Msgbox.setText("HighPass higher than LowPass, disabled both."); Msgbox.exec(); BPHighPass = 0; BPLowPass = 0; } image = m_FilterBank->BandpassFilter(image, recordTime, BPHighPass, BPLowPass, m_Controls.BPFalloff->value()); if (saveSteps[2]) { std::string saveFileName = saveDir.toStdString() + "/" + imageName + " bandpassed" + ".nrrd"; mitk::IOUtil::Save(image, saveFileName); } } // Bmode if (doSteps[3]) { m_Controls.ProgressInfo->setText("applying bmode filter"); bool useGPU = m_Controls.UseGPUBmode->isChecked(); if (m_Controls.BModeMethod->currentText() == "Absolute Filter") image = m_FilterBank->ApplyBmodeFilter(image, mitk::PhotoacousticImage::BModeMethod::Abs, useGPU, m_UseLogfilter, m_ResampleSpacing); else if (m_Controls.BModeMethod->currentText() == "Envelope Detection") image = m_FilterBank->ApplyBmodeFilter(image, mitk::PhotoacousticImage::BModeMethod::ShapeDetection, useGPU, m_UseLogfilter, m_ResampleSpacing); if (saveSteps[3]) { std::string saveFileName = saveDir.toStdString() + "/" + imageName + " bmode" + ".nrrd"; mitk::IOUtil::Save(image, saveFileName); } } m_Controls.progressBar->setVisible(false); } EnableControls(); } void PAImageProcessing::StartBeamformingThread() { QList nodes = this->GetDataManagerSelection(); if (nodes.empty()) return; mitk::DataStorage::Pointer storage = this->GetDataStorage(); mitk::DataNode::Pointer node = nodes.front(); if (!node) { // Nothing selected.
Inform the user and return QMessageBox::information(NULL, "Template", "Please load and select an image before starting image processing."); return; } mitk::BaseData* data = node->GetData(); if (data) { // test if this data item is an image or not (could also be a surface or something totally different) mitk::Image* image = dynamic_cast(data); if (image) { UpdateBFSettings(image); std::stringstream message; std::string name; message << "Performing beamforming for image "; if (node->GetName(name)) { // a property called "name" was found for this DataNode message << "'" << name << "'"; m_OldNodeName = name; } else m_OldNodeName = " "; message << "."; MITK_INFO << message.str(); m_Controls.progressBar->setValue(0); m_Controls.progressBar->setVisible(true); m_Controls.ProgressInfo->setVisible(true); m_Controls.ProgressInfo->setText("started"); m_Controls.buttonApplyBeamforming->setText("working..."); DisableControls(); BeamformingThread *thread = new BeamformingThread(); connect(thread, &BeamformingThread::result, this, &PAImageProcessing::HandleBeamformingResults); connect(thread, &BeamformingThread::updateProgress, this, &PAImageProcessing::UpdateProgress); + connect(thread, &BeamformingThread::message, this, &PAImageProcessing::PAMessageBox); connect(thread, &BeamformingThread::finished, thread, &QObject::deleteLater); thread->setConfig(BFconfig); thread->setInputImage(image); thread->setFilterBank(m_FilterBank); MITK_INFO << "Started new thread for Beamforming"; thread->start(); } } } void PAImageProcessing::HandleBeamformingResults(mitk::Image::Pointer image) { auto newNode = mitk::DataNode::New(); newNode->SetData(image); // name the new Data node std::stringstream newNodeName; newNodeName << m_OldNodeName << " "; if (BFconfig.Algorithm == mitk::BeamformingSettings::BeamformingAlgorithm::DAS) newNodeName << "DAS bf, "; else if (BFconfig.Algorithm == mitk::BeamformingSettings::BeamformingAlgorithm::DMAS) newNodeName << "DMAS bf, "; if (BFconfig.DelayCalculationMethod == mitk::BeamformingSettings::DelayCalc::QuadApprox) newNodeName << "q. delay"; if (BFconfig.DelayCalculationMethod == mitk::BeamformingSettings::DelayCalc::Spherical) newNodeName << "s. delay"; newNode->SetName(newNodeName.str()); // update level window for the current dynamic range mitk::LevelWindow levelWindow; newNode->GetLevelWindow(levelWindow); auto data = newNode->GetData(); levelWindow.SetAuto(dynamic_cast(data), true, true); newNode->SetLevelWindow(levelWindow); // add new node to data storage this->GetDataStorage()->Add(newNode); // disable progress bar m_Controls.progressBar->setVisible(false); m_Controls.ProgressInfo->setVisible(false); m_Controls.buttonApplyBeamforming->setText("Apply Beamforming"); EnableControls(); // update rendering mitk::RenderingManager::GetInstance()->InitializeViews( dynamic_cast(data)->GetGeometry(), mitk::RenderingManager::REQUEST_UPDATE_ALL, true); mitk::RenderingManager::GetInstance()->RequestUpdateAll(); } void PAImageProcessing::StartBmodeThread() { QList nodes = this->GetDataManagerSelection(); if (nodes.empty()) return; mitk::DataStorage::Pointer storage = this->GetDataStorage(); mitk::DataNode::Pointer node = nodes.front(); if (!node) { // Nothing selected. 
Inform the user and return QMessageBox::information(NULL, "Template", "Please load and select an image before starting image processing."); return; } mitk::BaseData* data = node->GetData(); if (data) { // test if this data item is an image or not (could also be a surface or something totally different) mitk::Image* image = dynamic_cast(data); if (image) { UpdateBFSettings(image); std::stringstream message; std::string name; message << "Performing image processing for image "; if (node->GetName(name)) { // a property called "name" was found for this DataNode message << "'" << name << "'"; m_OldNodeName = name; } else m_OldNodeName = " "; message << "."; MITK_INFO << message.str(); m_Controls.buttonApplyBModeFilter->setText("working..."); DisableControls(); BmodeThread *thread = new BmodeThread(); connect(thread, &BmodeThread::result, this, &PAImageProcessing::HandleBmodeResults); connect(thread, &BmodeThread::finished, thread, &QObject::deleteLater); bool useGPU = m_Controls.UseGPUBmode->isChecked(); if(m_Controls.BModeMethod->currentText() == "Absolute Filter") thread->setConfig(m_UseLogfilter, m_ResampleSpacing, mitk::PhotoacousticImage::BModeMethod::Abs, useGPU); else if(m_Controls.BModeMethod->currentText() == "Envelope Detection") thread->setConfig(m_UseLogfilter, m_ResampleSpacing, mitk::PhotoacousticImage::BModeMethod::ShapeDetection, useGPU); thread->setInputImage(image); thread->setFilterBank(m_FilterBank); MITK_INFO << "Started new thread for Image Processing"; thread->start(); } } } void PAImageProcessing::HandleBmodeResults(mitk::Image::Pointer image) { auto newNode = mitk::DataNode::New(); newNode->SetData(image); // name the new Data node std::stringstream newNodeName; newNodeName << m_OldNodeName << " "; newNodeName << "B-Mode"; newNode->SetName(newNodeName.str()); // update level window for the current dynamic range mitk::LevelWindow levelWindow; newNode->GetLevelWindow(levelWindow); auto data = newNode->GetData(); levelWindow.SetAuto(dynamic_cast(data), true, true); newNode->SetLevelWindow(levelWindow); // add new node to data storage this->GetDataStorage()->Add(newNode); // disable progress bar m_Controls.progressBar->setVisible(false); m_Controls.buttonApplyBModeFilter->setText("Apply B-mode Filter"); EnableControls(); // update rendering mitk::RenderingManager::GetInstance()->InitializeViews( dynamic_cast(data)->GetGeometry(), mitk::RenderingManager::REQUEST_UPDATE_ALL, true); mitk::RenderingManager::GetInstance()->RequestUpdateAll(); } void PAImageProcessing::StartCropThread() { QList nodes = this->GetDataManagerSelection(); if (nodes.empty()) return; mitk::DataStorage::Pointer storage = this->GetDataStorage(); mitk::DataNode::Pointer node = nodes.front(); if (!node) { // Nothing selected. 
Inform the user and return QMessageBox::information(NULL, "Template", "Please load and select an image before starting image cropping."); return; } mitk::BaseData* data = node->GetData(); if (data) { // test if this data item is an image or not (could also be a surface or something totally different) mitk::Image* image = dynamic_cast(data); if (image) { UpdateBFSettings(image); std::stringstream message; std::string name; message << "Performing image cropping for image "; if (node->GetName(name)) { // a property called "name" was found for this DataNode message << "'" << name << "'"; m_OldNodeName = name; } else m_OldNodeName = " "; message << "."; MITK_INFO << message.str(); m_Controls.buttonApplyCropFilter->setText("working..."); DisableControls(); CropThread *thread = new CropThread(); connect(thread, &CropThread::result, this, &PAImageProcessing::HandleCropResults); connect(thread, &CropThread::finished, thread, &QObject::deleteLater); thread->setConfig(m_Controls.CutoffAbove->value(), m_Controls.CutoffBelow->value()); thread->setInputImage(image); thread->setFilterBank(m_FilterBank); MITK_INFO << "Started new thread for Image Cropping"; thread->start(); } } } void PAImageProcessing::HandleCropResults(mitk::Image::Pointer image) { auto newNode = mitk::DataNode::New(); newNode->SetData(image); // name the new Data node std::stringstream newNodeName; newNodeName << m_OldNodeName << " "; newNodeName << "Cropped"; newNode->SetName(newNodeName.str()); // update level window for the current dynamic range mitk::LevelWindow levelWindow; newNode->GetLevelWindow(levelWindow); auto data = newNode->GetData(); levelWindow.SetAuto(dynamic_cast(data), true, true); newNode->SetLevelWindow(levelWindow); // add new node to data storage this->GetDataStorage()->Add(newNode); m_Controls.buttonApplyCropFilter->setText("Apply Crop Filter"); EnableControls(); // update rendering mitk::RenderingManager::GetInstance()->InitializeViews( dynamic_cast(data)->GetGeometry(), mitk::RenderingManager::REQUEST_UPDATE_ALL, true); mitk::RenderingManager::GetInstance()->RequestUpdateAll(); } void PAImageProcessing::StartBandpassThread() { QList nodes = this->GetDataManagerSelection(); if (nodes.empty()) return; mitk::DataStorage::Pointer storage = this->GetDataStorage(); mitk::DataNode::Pointer node = nodes.front(); if (!node) { // Nothing selected. 
Inform the user and return QMessageBox::information(NULL, "Template", "Please load and select an image before applying the bandpass filter."); return; } mitk::BaseData* data = node->GetData(); if (data) { // test if this data item is an image or not (could also be a surface or something totally different) mitk::Image* image = dynamic_cast(data); if (image) { UpdateBFSettings(image); std::stringstream message; std::string name; message << "Performing Bandpass filter on image "; if (node->GetName(name)) { // a property called "name" was found for this DataNode message << "'" << name << "'"; m_OldNodeName = name; } else m_OldNodeName = " "; message << "."; MITK_INFO << message.str(); m_Controls.buttonApplyBandpass->setText("working..."); DisableControls(); BandpassThread *thread = new BandpassThread(); connect(thread, &BandpassThread::result, this, &PAImageProcessing::HandleBandpassResults); connect(thread, &BandpassThread::finished, thread, &QObject::deleteLater); float recordTime = image->GetDimension(1)*image->GetGeometry()->GetSpacing()[1] / 1000 / m_Controls.BPSpeedOfSound->value(); // add a safeguard so the program does not crash when applying a Bandpass that reaches out of the bounds of the image float maxFrequency = 1 / (recordTime / image->GetDimension(1)) * image->GetDimension(1) / 2 / 2 / 1000; float BPHighPass = 1000000 * m_Controls.BPhigh->value(); // [Hz] float BPLowPass = maxFrequency - 1000000 * m_Controls.BPlow->value(); // [Hz] if (BPLowPass > maxFrequency && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("LowPass too low, disabled it."); Msgbox.exec(); BPLowPass = 0; } if (BPLowPass < 0 && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("LowPass too high, disabled it."); Msgbox.exec(); BPLowPass = 0; } if (BPHighPass > maxFrequency && m_Controls.UseBP->isChecked()) { QMessageBox Msgbox; Msgbox.setText("HighPass too high, disabled it."); Msgbox.exec(); BPHighPass = 0; } if (BPHighPass > maxFrequency - BFconfig.BPLowPass) { QMessageBox Msgbox; Msgbox.setText("HighPass higher than LowPass, disabled both."); Msgbox.exec(); BPHighPass = 0; BPLowPass = 0; } thread->setConfig(BPHighPass, BPLowPass, m_Controls.BPFalloff->value(), recordTime); thread->setInputImage(image); thread->setFilterBank(m_FilterBank); MITK_INFO << "Started new thread for Bandpass filter"; thread->start(); } } } void PAImageProcessing::HandleBandpassResults(mitk::Image::Pointer image) { auto newNode = mitk::DataNode::New(); newNode->SetData(image); // name the new Data node std::stringstream newNodeName; newNodeName << m_OldNodeName << " "; newNodeName << "Bandpassed"; newNode->SetName(newNodeName.str()); // update level window for the current dynamic range mitk::LevelWindow levelWindow; newNode->GetLevelWindow(levelWindow); auto data = newNode->GetData(); levelWindow.SetAuto(dynamic_cast(data), true, true); newNode->SetLevelWindow(levelWindow); // add new node to data storage this->GetDataStorage()->Add(newNode); m_Controls.buttonApplyBandpass->setText("Apply Bandpass"); EnableControls(); // update rendering mitk::RenderingManager::GetInstance()->InitializeViews( dynamic_cast(data)->GetGeometry(), mitk::RenderingManager::REQUEST_UPDATE_ALL, true); mitk::RenderingManager::GetInstance()->RequestUpdateAll(); } -void PAImageProcessing::UpdateBounds() +void PAImageProcessing::SliceBoundsEnabled() { if (!m_Controls.Partial->isChecked()) { m_Controls.boundLow->setEnabled(false); m_Controls.boundHigh->setEnabled(false); BFconfig.partial = false; return; } else {
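// partial reconstruction requested: unlock the slice-bound spin boxes and sync their current values into BFconfig.CropBounds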
m_Controls.boundLow->setEnabled(true); m_Controls.boundHigh->setEnabled(true); BFconfig.partial = true; } + UpperSliceBoundChanged(); +} + +void PAImageProcessing::UpperSliceBoundChanged() +{ if(m_Controls.boundLow->value() > m_Controls.boundHigh->value()) + { + m_Controls.boundLow->setValue(m_Controls.boundHigh->value()); + BFconfig.CropBounds[0] = m_Controls.boundLow->value(); + BFconfig.CropBounds[1] = m_Controls.boundHigh->value(); + } + else + { + BFconfig.CropBounds[0] = m_Controls.boundLow->value(); + BFconfig.CropBounds[1] = m_Controls.boundHigh->value(); + } +} + +void PAImageProcessing::LowerSliceBoundChanged() +{ + if (m_Controls.boundLow->value() > m_Controls.boundHigh->value()) { m_Controls.boundHigh->setValue(m_Controls.boundLow->value()); BFconfig.CropBounds[0] = m_Controls.boundLow->value(); BFconfig.CropBounds[1] = m_Controls.boundHigh->value(); } else { BFconfig.CropBounds[0] = m_Controls.boundLow->value(); BFconfig.CropBounds[1] = m_Controls.boundHigh->value(); } } void PAImageProcessing::UpdateProgress(int progress, std::string progressInfo) { if (progress < 100) m_Controls.progressBar->setValue(progress); else m_Controls.progressBar->setValue(100); m_Controls.ProgressInfo->setText(progressInfo.c_str()); qApp->processEvents(); } +void PAImageProcessing::PAMessageBox(std::string message) +{ + if (0 != message.compare("noMessage")) + { + QMessageBox msgBox; + msgBox.setText(message.c_str()); + msgBox.exec(); + } +} + void PAImageProcessing::UpdateImageInfo() { QList nodes = this->GetDataManagerSelection(); if (nodes.empty()) return; mitk::DataNode::Pointer node = nodes.front(); if (!node) { // Nothing selected return; } mitk::BaseData* data = node->GetData(); if (data) { // test if this data item is an image or not (could also be a surface or something totally different) mitk::Image* image = dynamic_cast(data); if (image) { // beamforming configs if (m_Controls.UseImageSpacing->isChecked()) { m_Controls.ElementCount->setValue(image->GetDimension(0)); m_Controls.Pitch->setValue(image->GetGeometry()->GetSpacing()[0]); m_Controls.boundLow->setMaximum(image->GetDimension(2) - 1); m_Controls.boundHigh->setMaximum(image->GetDimension(2) - 1); } UpdateRecordTime(image); // bandpass configs float speedOfSound = m_Controls.BPSpeedOfSound->value(); // [m/s] float recordTime = image->GetDimension(1)*image->GetGeometry()->GetSpacing()[1] / 1000 / speedOfSound; std::stringstream frequency; float maxFrequency = 1 / (recordTime / image->GetDimension(1)) * image->GetDimension(1) / 2 / 2 / 1000; frequency << maxFrequency / 1000000; //[MHz] frequency << "MHz"; m_Controls.BPhigh->setMaximum(maxFrequency / 1000000); m_Controls.BPlow->setMaximum(maxFrequency / 1000000); frequency << " is the maximal allowed frequency for the selected image."; m_Controls.BPhigh->setToolTip(frequency.str().c_str()); m_Controls.BPlow->setToolTip(frequency.str().c_str()); m_Controls.BPhigh->setToolTipDuration(5000); m_Controls.BPlow->setToolTipDuration(5000); } } } void PAImageProcessing::OnSelectionChanged( berry::IWorkbenchPart::Pointer /*source*/, const QList& nodes ) { // iterate all selected objects, adjust warning visibility foreach( mitk::DataNode::Pointer node, nodes ) { if( node.IsNotNull() && dynamic_cast(node->GetData()) ) { m_Controls.labelWarning->setVisible( false ); m_Controls.buttonApplyBModeFilter->setEnabled( true ); m_Controls.labelWarning2->setVisible(false); m_Controls.buttonApplyCropFilter->setEnabled(true); m_Controls.labelWarning3->setVisible(false); 
m_Controls.buttonApplyBandpass->setEnabled(true); m_Controls.labelWarning4->setVisible(false); m_Controls.buttonApplyBeamforming->setEnabled(true); UpdateImageInfo(); return; } } m_Controls.labelWarning->setVisible( true ); m_Controls.buttonApplyBModeFilter->setEnabled( false ); m_Controls.labelWarning2->setVisible(true); m_Controls.buttonApplyCropFilter->setEnabled(false); m_Controls.labelWarning3->setVisible(true); m_Controls.buttonApplyBandpass->setEnabled(false); m_Controls.labelWarning4->setVisible(true); m_Controls.buttonApplyBeamforming->setEnabled(false); } void PAImageProcessing::UseResampling() { if (m_Controls.DoResampling->isChecked()) { m_Controls.ResamplingValue->setEnabled(true); m_ResampleSpacing = m_Controls.ResamplingValue->value(); } else { m_Controls.ResamplingValue->setEnabled(false); m_ResampleSpacing = 0; } } void PAImageProcessing::UseLogfilter() { m_UseLogfilter = m_Controls.Logfilter->isChecked(); } void PAImageProcessing::SetResampling() { m_ResampleSpacing = m_Controls.ResamplingValue->value(); } void PAImageProcessing::UpdateBFSettings(mitk::Image::Pointer image) { if ("DAS" == m_Controls.BFAlgorithm->currentText()) BFconfig.Algorithm = mitk::BeamformingSettings::BeamformingAlgorithm::DAS; else if ("DMAS" == m_Controls.BFAlgorithm->currentText()) BFconfig.Algorithm = mitk::BeamformingSettings::BeamformingAlgorithm::DMAS; if ("Quad. Approx." == m_Controls.DelayCalculation->currentText()) { BFconfig.DelayCalculationMethod = mitk::BeamformingSettings::DelayCalc::QuadApprox; } else if ("Spherical Wave" == m_Controls.DelayCalculation->currentText()) { BFconfig.DelayCalculationMethod = mitk::BeamformingSettings::DelayCalc::Spherical; } if ("Von Hann" == m_Controls.Apodization->currentText()) { BFconfig.Apod = mitk::BeamformingSettings::Apodization::Hann; } else if ("Hamming" == m_Controls.Apodization->currentText()) { BFconfig.Apod = mitk::BeamformingSettings::Apodization::Hamm; } else if ("Box" == m_Controls.Apodization->currentText()) { BFconfig.Apod = mitk::BeamformingSettings::Apodization::Box; } BFconfig.Pitch = m_Controls.Pitch->value() / 1000; // [m] BFconfig.SpeedOfSound = m_Controls.SpeedOfSound->value(); // [m/s] BFconfig.SamplesPerLine = m_Controls.Samples->value(); BFconfig.ReconstructionLines = m_Controls.Lines->value(); BFconfig.TransducerElements = m_Controls.ElementCount->value(); + BFconfig.apodizationArraySize = m_Controls.ElementCount->value(); BFconfig.Angle = m_Controls.Angle->value(); // [deg] BFconfig.UseBP = m_Controls.UseBP->isChecked(); BFconfig.UseGPU = m_Controls.UseGPUBf->isChecked(); BFconfig.upperCutoff = m_Controls.Cutoff->value(); UpdateRecordTime(image); - UpdateBounds(); + SliceBoundsEnabled(); } void PAImageProcessing::UpdateRecordTime(mitk::Image::Pointer image) { if (m_Controls.UseImageSpacing->isChecked()) { BFconfig.RecordTime = image->GetDimension(1)*image->GetGeometry()->GetSpacing()[1] / 1000000; // [s] BFconfig.TimeSpacing = image->GetGeometry()->GetSpacing()[1] / 1000000; MITK_INFO << "Calculated Scan Depth of " << BFconfig.RecordTime * BFconfig.SpeedOfSound * 100 / 2<< "cm"; } else { BFconfig.RecordTime = 2 * m_Controls.ScanDepth->value() / 1000 / BFconfig.SpeedOfSound; // [s] BFconfig.TimeSpacing = BFconfig.RecordTime / image->GetDimension(1); } if ("US Image" == m_Controls.ImageType->currentText()) { BFconfig.isPhotoacousticImage = false; } else if ("PA Image" == m_Controls.ImageType->currentText()) { BFconfig.isPhotoacousticImage = true; } } void PAImageProcessing::EnableControls() { 
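// re-enable all controls that DisableControls() greyed out while a worker thread was running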
m_Controls.DoResampling->setEnabled(true); UseResampling(); m_Controls.Logfilter->setEnabled(true); m_Controls.buttonApplyBModeFilter->setEnabled(true); m_Controls.CutoffAbove->setEnabled(true); m_Controls.CutoffBelow->setEnabled(true); m_Controls.Cutoff->setEnabled(true); m_Controls.buttonApplyCropFilter->setEnabled(true); m_Controls.buttonApplyBandpass->setEnabled(true); m_Controls.Partial->setEnabled(true); m_Controls.boundHigh->setEnabled(true); m_Controls.boundLow->setEnabled(true); m_Controls.BFAlgorithm->setEnabled(true); m_Controls.DelayCalculation->setEnabled(true); m_Controls.ImageType->setEnabled(true); m_Controls.Apodization->setEnabled(true); m_Controls.UseBP->setEnabled(true); #ifdef PHOTOACOUSTICS_USE_GPU m_Controls.UseGPUBf->setEnabled(true); m_Controls.UseGPUBmode->setEnabled(true); #endif m_Controls.BPhigh->setEnabled(true); m_Controls.BPlow->setEnabled(true); m_Controls.BPFalloff->setEnabled(true); m_Controls.UseImageSpacing->setEnabled(true); UseImageSpacing(); m_Controls.Pitch->setEnabled(true); m_Controls.ElementCount->setEnabled(true); m_Controls.SpeedOfSound->setEnabled(true); m_Controls.Samples->setEnabled(true); m_Controls.Lines->setEnabled(true); m_Controls.Angle->setEnabled(true); m_Controls.buttonApplyBeamforming->setEnabled(true); } void PAImageProcessing::DisableControls() { m_Controls.DoResampling->setEnabled(false); m_Controls.ResamplingValue->setEnabled(false); m_Controls.Logfilter->setEnabled(false); m_Controls.buttonApplyBModeFilter->setEnabled(false); m_Controls.CutoffAbove->setEnabled(false); m_Controls.CutoffBelow->setEnabled(false); m_Controls.Cutoff->setEnabled(false); m_Controls.buttonApplyCropFilter->setEnabled(false); m_Controls.buttonApplyBandpass->setEnabled(false); m_Controls.Partial->setEnabled(false); m_Controls.boundHigh->setEnabled(false); m_Controls.boundLow->setEnabled(false); m_Controls.BFAlgorithm->setEnabled(false); m_Controls.DelayCalculation->setEnabled(false); m_Controls.ImageType->setEnabled(false); m_Controls.Apodization->setEnabled(false); m_Controls.UseBP->setEnabled(false); #ifdef PHOTOACOUSTICS_USE_GPU m_Controls.UseGPUBf->setEnabled(false); m_Controls.UseGPUBmode->setEnabled(false); #endif m_Controls.BPhigh->setEnabled(false); m_Controls.BPlow->setEnabled(false); m_Controls.BPFalloff->setEnabled(false); m_Controls.UseImageSpacing->setEnabled(false); m_Controls.ScanDepth->setEnabled(false); m_Controls.Pitch->setEnabled(false); m_Controls.ElementCount->setEnabled(false); m_Controls.SpeedOfSound->setEnabled(false); m_Controls.Samples->setEnabled(false); m_Controls.Lines->setEnabled(false); m_Controls.Angle->setEnabled(false); m_Controls.buttonApplyBeamforming->setEnabled(false); } void PAImageProcessing::UseImageSpacing() { if (m_Controls.UseImageSpacing->isChecked()) { m_Controls.ScanDepth->setDisabled(true); } else { m_Controls.ScanDepth->setEnabled(true); } } void BeamformingThread::run() { mitk::Image::Pointer resultImage; + std::string errorMessage = ""; std::function progressHandle = [this](int progress, std::string progressInfo) { emit updateProgress(progress, progressInfo); }; - resultImage = m_FilterBank->ApplyBeamforming(m_InputImage, m_BFconfig, progressHandle); + resultImage = m_FilterBank->ApplyBeamforming(m_InputImage, m_BFconfig, errorMessage, progressHandle); - emit result(resultImage); + emit result(resultImage); + emit message(errorMessage); } void BeamformingThread::setConfig(mitk::BeamformingSettings BFconfig) { m_BFconfig = BFconfig; } void BeamformingThread::setInputImage(mitk::Image::Pointer image) { 
m_InputImage = image; } void BmodeThread::run() { mitk::Image::Pointer resultImage; resultImage = m_FilterBank->ApplyBmodeFilter(m_InputImage, m_Method, m_UseGPU, m_UseLogfilter, m_ResampleSpacing); emit result(resultImage); } void BmodeThread::setConfig(bool useLogfilter, double resampleSpacing, mitk::PhotoacousticImage::BModeMethod method, bool useGPU) { m_UseLogfilter = useLogfilter; m_ResampleSpacing = resampleSpacing; m_Method = method; m_UseGPU = useGPU; } void BmodeThread::setInputImage(mitk::Image::Pointer image) { m_InputImage = image; } void CropThread::run() { mitk::Image::Pointer resultImage; resultImage = m_FilterBank->ApplyCropping(m_InputImage, m_CutAbove, m_CutBelow, 0, 0, 0, m_InputImage->GetDimension(2) - 1); emit result(resultImage); } void CropThread::setConfig(unsigned int CutAbove, unsigned int CutBelow) { m_CutAbove = CutAbove; m_CutBelow = CutBelow; } void CropThread::setInputImage(mitk::Image::Pointer image) { m_InputImage = image; } void BandpassThread::run() { mitk::Image::Pointer resultImage; resultImage = m_FilterBank->BandpassFilter(m_InputImage, m_RecordTime, m_BPHighPass, m_BPLowPass, m_TukeyAlpha); emit result(resultImage); } void BandpassThread::setConfig(float BPHighPass, float BPLowPass, float TukeyAlpha, float recordTime) { m_BPHighPass = BPHighPass; m_BPLowPass = BPLowPass; m_TukeyAlpha = TukeyAlpha; m_RecordTime = recordTime; } void BandpassThread::setInputImage(mitk::Image::Pointer image) { m_InputImage = image; } diff --git a/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.h b/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.h index 59b649bde8..aa9af80769 100644 --- a/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.h +++ b/Plugins/org.mitk.gui.qt.photoacoustics.imageprocessing/src/internal/PAImageProcessing.h @@ -1,214 +1,218 @@ /*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. 
===================================================================*/ #ifndef PAImageProcessing_h #define PAImageProcessing_h #include #include #include #include #include "ui_PAImageProcessingControls.h" #include "mitkPhotoacousticBeamformingFilter.h" #include "mitkPhotoacousticBeamformingSettings.h" Q_DECLARE_METATYPE(mitk::Image::Pointer) Q_DECLARE_METATYPE(std::string) class PAImageProcessing : public QmitkAbstractView { // this is needed for all Qt objects that should have a Qt meta-object // (everything that derives from QObject and wants to have signal/slots) Q_OBJECT public: static const std::string VIEW_ID; PAImageProcessing(); protected slots: /// \brief Called when the user clicks the GUI button - void UpdateBounds(); + void UpperSliceBoundChanged(); + void LowerSliceBoundChanged(); + void SliceBoundsEnabled(); void UseResampling(); void UseLogfilter(); void SetResampling(); void UseImageSpacing(); void UpdateImageInfo(); void UpdateRecordTime(mitk::Image::Pointer image); void HandleBeamformingResults(mitk::Image::Pointer image); void StartBeamformingThread(); void HandleBmodeResults(mitk::Image::Pointer image); void StartBmodeThread(); void HandleCropResults(mitk::Image::Pointer image); void StartCropThread(); void HandleBandpassResults(mitk::Image::Pointer image); void StartBandpassThread(); void UpdateProgress(int progress, std::string progressInfo); + void PAMessageBox(std::string message); void BatchProcessing(); void UpdateSaveBoxes(); void ChangedSOSBandpass(); void ChangedSOSBeamforming(); protected: virtual void CreateQtPartControl(QWidget *parent) override; virtual void SetFocus() override; /// \brief called by QmitkFunctionality when DataManager's selection has changed virtual void OnSelectionChanged( berry::IWorkbenchPart::Pointer source, const QList& nodes ) override; Ui::PAImageProcessingControls m_Controls; float m_ResampleSpacing; bool m_UseLogfilter; std::string m_OldNodeName; mitk::BeamformingSettings BFconfig; void UpdateBFSettings(mitk::Image::Pointer image); void EnableControls(); void DisableControls(); mitk::PhotoacousticImage::Pointer m_FilterBank; }; class BeamformingThread : public QThread { Q_OBJECT void run() Q_DECL_OVERRIDE; signals: void result(mitk::Image::Pointer); void updateProgress(int, std::string); + void message(std::string); public: void setConfig(mitk::BeamformingSettings BFconfig); void setInputImage(mitk::Image::Pointer image); void setFilterBank(mitk::PhotoacousticImage::Pointer filterBank) { m_FilterBank = filterBank; } protected: mitk::BeamformingSettings m_BFconfig; mitk::Image::Pointer m_InputImage; int m_Cutoff; mitk::PhotoacousticImage::Pointer m_FilterBank; }; class BmodeThread : public QThread { Q_OBJECT void run() Q_DECL_OVERRIDE; signals: void result(mitk::Image::Pointer); public: enum BModeMethod { ShapeDetection, Abs }; void setConfig(bool useLogfilter, double resampleSpacing, mitk::PhotoacousticImage::BModeMethod method, bool useGPU); void setInputImage(mitk::Image::Pointer image); void setFilterBank(mitk::PhotoacousticImage::Pointer filterBank) { m_FilterBank = filterBank; } protected: mitk::Image::Pointer m_InputImage; mitk::PhotoacousticImage::BModeMethod m_Method; bool m_UseLogfilter; double m_ResampleSpacing; bool m_UseGPU; mitk::PhotoacousticImage::Pointer m_FilterBank; }; class CropThread : public QThread { Q_OBJECT void run() Q_DECL_OVERRIDE; signals: void result(mitk::Image::Pointer); public: void setConfig(unsigned int CutAbove, unsigned int CutBelow); void setInputImage(mitk::Image::Pointer image); void 
setFilterBank(mitk::PhotoacousticImage::Pointer filterBank) { m_FilterBank = filterBank; } protected: mitk::Image::Pointer m_InputImage; unsigned int m_CutAbove; unsigned int m_CutBelow; mitk::PhotoacousticImage::Pointer m_FilterBank; }; class BandpassThread : public QThread { Q_OBJECT void run() Q_DECL_OVERRIDE; signals: void result(mitk::Image::Pointer); public: void setConfig(float BPHighPass, float BPLowPass, float TukeyAlpha, float recordTime); void setInputImage(mitk::Image::Pointer image); void setFilterBank(mitk::PhotoacousticImage::Pointer filterBank) { m_FilterBank = filterBank; } protected: mitk::Image::Pointer m_InputImage; float m_BPHighPass; float m_BPLowPass; float m_TukeyAlpha; float m_RecordTime; mitk::PhotoacousticImage::Pointer m_FilterBank; }; #endif // PAImageProcessing_h
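
For readers who want to call the changed ApplyBeamforming() signature directly, the sketch below illustrates the error-reporting flow added by this patch: the filter bank writes into the new std::string& message parameter, and the caller decides how to surface it (the plugin routes it through the BeamformingThread::message signal to PAMessageBox, which suppresses the sentinel "noMessage"). This is an illustrative sketch, not part of the patch; the file names, the default-filled BeamformingSettings, and the (int, std::string) progress-callback signature inferred from the plugin's lambdas are assumptions.

#include <string>
#include <mitkIOUtil.h>
#include <mitkLogMacros.h>
#include "mitkPhotoacousticImage.h"
#include "mitkPhotoacousticBeamformingSettings.h"

// Hypothetical standalone use of the new ApplyBeamforming() overload.
// "input.nrrd"/"beamformed.nrrd" are placeholders; BFconfig must be filled with
// sensible values (Pitch, SpeedOfSound, SamplesPerLine, ...) just as
// PAImageProcessing::UpdateBFSettings() does in the plugin.
int main()
{
  mitk::Image::Pointer input = mitk::IOUtil::LoadImage("input.nrrd");

  mitk::BeamformingSettings BFconfig;
  std::string errorMessage = "noMessage"; // sentinel that PAMessageBox treats as "no error"

  auto filterBank = mitk::PhotoacousticImage::New();
  mitk::Image::Pointer result = filterBank->ApplyBeamforming(
    input, BFconfig, errorMessage,
    [](int progress, std::string info) { MITK_INFO << progress << "% - " << info; });

  // The GUI shows a QMessageBox via the message signal; a console client can log instead.
  if (errorMessage.compare("noMessage") != 0)
    MITK_WARN << "Beamforming reported: " << errorMessage;

  mitk::IOUtil::Save(result, "beamformed.nrrd");
  return 0;
}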