VTK notes - vtkProbeFilter probe class

vtkProbeFilter class

  The vtkProbeFilter class samples data values at specified point positions: it probes the scalar (and vector) data of a source dataset at the points of an input geometry. It is a filter class with two inputs: the Input, which supplies the probe geometry, and the Source, which supplies the data to be probed. The value at each probe point is computed by interpolation within the source cells, and vtkProbeFilter copies the interpolated source data at each probe point to its output.
  The vtkProbeFilter class can probe the source data along a plane through 3D volume data, similar to an MPR (multi-planar reformation) function.
  The class can also resample data, or convert one kind of vtkDataSet into another. For example, an unstructured grid (vtkUnstructuredGrid) can be probed with a volume (3D vtkImageData) and the result visualized with volume rendering techniques; or the data can be probed with a line or curve to generate an x-y plot along that line or curve (a minimal sketch of this is given below, after Figure 1). Probing with a surface reconstructed from a curve corresponds to a CPR (curved planar reformation) function.
[Figure 1: demonstrations of functions that vtkProbeFilter can realize]
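
  A minimal sketch (not part of the original article's code) of the "probe along a line to get data for an x-y plot" idea: it probes a synthetic volume along a straight line and prints the sampled scalars. vtkRTAnalyticSource is used here only as a stand-in vtkImageData source.

#include <vtkDataArray.h>
#include <vtkDataSet.h>
#include <vtkLineSource.h>
#include <vtkNew.h>
#include <vtkPointData.h>
#include <vtkProbeFilter.h>
#include <vtkRTAnalyticSource.h>
#include <iostream>

int main(int, char*[])
{
    vtkNew<vtkRTAnalyticSource> volume; // synthetic image data, extent -10..10

    vtkNew<vtkLineSource> line;         // probe geometry: 100 points on a line
    line->SetPoint1(-10.0, 0.0, 0.0);
    line->SetPoint2(10.0, 0.0, 0.0);
    line->SetResolution(99);

    vtkNew<vtkProbeFilter> probe;
    probe->SetInputConnection(line->GetOutputPort());    // Input: where to sample
    probe->SetSourceConnection(volume->GetOutputPort()); // Source: what to sample
    probe->Update();

    // The interpolated source scalars are attached to the line's points.
    vtkDataArray* scalars = probe->GetOutput()->GetPointData()->GetScalars();
    for (vtkIdType i = 0; i < scalars->GetNumberOfTuples(); ++i)
    {
        std::cout << i << " " << scalars->GetTuple1(i) << "\n";
    }
    return 0;
}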

  The vtkProbeFilter class can be used for slice (section) reconstruction and curved-surface reconstruction. Note that its output is not vtkImageData but vtkDataSet: if vtkProbeFilter is given a plane as input it produces a rendering effect similar to the vtkImageReslice class (a sketch follows), and if it is given an irregular point set plus topology it produces an irregular curved-surface reconstruction.
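
  A minimal sketch of the "probe with a plane = MPR-like slice" idea. It assumes the HeadMRVolume.mhd path used in the example below, and the plane placement and resolution values are chosen only for illustration; any vtkImageData source can be probed this way. The output is a vtkPolyData plane carrying interpolated scalars, so it is rendered with a vtkDataSetMapper rather than treated as a vtkImageData.

#include <vtkActor.h>
#include <vtkDataSetMapper.h>
#include <vtkImageData.h>
#include <vtkMetaImageReader.h>
#include <vtkNew.h>
#include <vtkPlaneSource.h>
#include <vtkProbeFilter.h>
#include <vtkRenderWindow.h>
#include <vtkRenderWindowInteractor.h>
#include <vtkRenderer.h>

int main(int, char*[])
{
    vtkNew<vtkMetaImageReader> reader;
    reader->SetFileName("G:\\Data\\HeadMRVolume.mhd");
    reader->Update();

    double b[6];
    reader->GetOutput()->GetBounds(b);

    // Probe geometry: an axial plane through the middle of the volume,
    // resampled into a 200 x 200 grid of probe points.
    double zMid = 0.5 * (b[4] + b[5]);
    vtkNew<vtkPlaneSource> plane;
    plane->SetOrigin(b[0], b[2], zMid);
    plane->SetPoint1(b[1], b[2], zMid);
    plane->SetPoint2(b[0], b[3], zMid);
    plane->SetResolution(200, 200);

    vtkNew<vtkProbeFilter> probe;
    probe->SetInputConnection(plane->GetOutputPort());   // geometry to sample at
    probe->SetSourceConnection(reader->GetOutputPort()); // volume to be probed

    // Default lookup table; a window/level LUT could be set as in the example below.
    vtkNew<vtkDataSetMapper> mapper;
    mapper->SetInputConnection(probe->GetOutputPort());
    double range[2];
    reader->GetOutput()->GetScalarRange(range);
    mapper->SetScalarRange(range[0], range[1]);

    vtkNew<vtkActor> slice;
    slice->SetMapper(mapper);

    vtkNew<vtkRenderer> renderer;
    renderer->AddActor(slice);
    vtkNew<vtkRenderWindow> window;
    window->AddRenderer(renderer);
    vtkNew<vtkRenderWindowInteractor> interactor;
    interactor->SetRenderWindow(window);
    window->Render();
    interactor->Start();
    return 0;
}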

example

Official website CurvedReformation example

namespace {
    vtkSmartPointer<vtkPolyData> SweepLine(vtkPolyData* line, double direction[3],
        double distance, unsigned int cols);
} // namespace

int main(int, char*[])
{
    vtkNew<vtkNamedColors> colors;
    // Input data (hard-coded here instead of parsed from command-line arguments)
    std::string volumeFileName = "G:\\Data\\HeadMRVolume.mhd";
    std::string polyDataFileName = "G:\\Data\\polyline.vtk";
    unsigned int resolution = 100;

    // Read the volume data
    vtkNew<vtkImageReader2Factory> imageFactory;
    vtkSmartPointer<vtkImageReader2> imageReader;
    imageReader.TakeReference(imageFactory->CreateImageReader2(volumeFileName.c_str()));
    imageReader->SetFileName(volumeFileName.c_str());
    imageReader->Update();

    // Read the Polyline
    vtkNew<vtkPolyDataReader> polyLineReader;
    polyLineReader->SetFileName(polyDataFileName.c_str());
    polyLineReader->Update();

    vtkNew<vtkSplineFilter> spline;
    spline->SetInputConnection(polyLineReader->GetOutputPort());
    spline->SetSubdivideToSpecified();
    spline->SetNumberOfSubdivisions(resolution);

    // Sweep the line to form a surface
    double direction[3];
    direction[0] = 0.0;
    direction[1] = 0.0;
    direction[2] = 1.0;
    double distance = 164;
    spline->Update();
    auto surface = SweepLine(spline->GetOutput(), direction, distance, 100);

    // Probe the volume with the extruded surface:
    // port 0 is the Input (the geometry to sample at), port 1 is the Source (the data to probe)
    vtkNew<vtkProbeFilter> sampleVolume;
    sampleVolume->SetInputData(1, imageReader->GetOutput());
    sampleVolume->SetInputData(0, surface);

    // Compute a simple window/level based on scalar range
    vtkNew<vtkWindowLevelLookupTable> wlLut;
    double range = imageReader->GetOutput()->GetScalarRange()[1] -
        imageReader->GetOutput()->GetScalarRange()[0];
    double level = (imageReader->GetOutput()->GetScalarRange()[1] +
        imageReader->GetOutput()->GetScalarRange()[0]) / 2.0;
    wlLut->SetWindow(range);
    wlLut->SetLevel(level);

    // Create a mapper and actor.
    vtkNew<vtkDataSetMapper> mapper;
    mapper->SetInputConnection(sampleVolume->GetOutputPort());
    mapper->SetLookupTable(wlLut);
    mapper->SetScalarRange(0, 255);

    vtkNew<vtkActor> actor;
    actor->SetMapper(mapper);

    // Create a renderer, render window, and interactor
    vtkNew<vtkRenderer> renderer;
    vtkNew<vtkRenderWindow> renderWindow;
    renderWindow->AddRenderer(renderer);
    renderWindow->SetWindowName("CurvedReformation");

    vtkNew<vtkRenderWindowInteractor> renderWindowInteractor;
    renderWindowInteractor->SetRenderWindow(renderWindow);

    // Add the actors to the scene
    renderer->AddActor(actor);
    renderer->SetBackground(colors->GetColor3d("DarkSlateGray").GetData());

    // Set the camera for viewing medical images
    renderer->GetActiveCamera()->SetViewUp(0, 0, 1);
    renderer->GetActiveCamera()->SetPosition(0, 0, 0);
    renderer->GetActiveCamera()->SetFocalPoint(0, 1, 0);
    renderer->ResetCamera();

    // Render and interact
    renderWindow->Render();
    renderWindowInteractor->Start();
    return 0;
}

namespace {
    vtkSmartPointer<vtkPolyData> SweepLine(vtkPolyData* line, double direction[3],
        double distance, unsigned int cols)
    {
        unsigned int rows = line->GetNumberOfPoints();
        double spacing = distance / cols;
        vtkNew<vtkPolyData> surface;

        // Generate the points
        cols++;
        unsigned int numberOfPoints = rows * cols;
        unsigned int numberOfPolys = (rows - 1) * (cols - 1);
        vtkNew<vtkPoints> points;
        points->Allocate(numberOfPoints);
        vtkNew<vtkCellArray> polys;
        polys->Allocate(numberOfPolys * 4);

        double x[3];
        unsigned int cnt = 0;
        for (unsigned int row = 0; row < rows; row++) {
            for (unsigned int col = 0; col < cols; col++) {
                double p[3];
                line->GetPoint(row, p);
                x[0] = p[0] + direction[0] * col * spacing;
                x[1] = p[1] + direction[1] * col * spacing;
                x[2] = p[2] + direction[2] * col * spacing;
                points->InsertPoint(cnt++, x);
            }
        }
        // Generate the quads
        vtkIdType pts[4];
        for (unsigned int row = 0; row < rows - 1; row++) {
            for (unsigned int col = 0; col < cols - 1; col++) {
                pts[0] = col + row * (cols);
                pts[1] = pts[0] + 1;
                pts[2] = pts[0] + cols + 1;
                pts[3] = pts[0] + cols;
                polys->InsertNextCell(4, pts);
            }
        }
        surface->SetPoints(points);
        surface->SetPolys(polys);

        return surface;
    }
} // namespace

  If the vtkPolyData returned by SweepLine is rendered as lines (its edges extracted with vtkExtractEdges), the effect is as follows:

vtkNew<vtkExtractEdges> extract;
extract->SetInputData(surface);
extract->Update();

vtkNew<vtkPolyDataMapper> mapper;
mapper->SetInputData(extract->GetOutput());

// To actually display the edges, attach the mapper to an actor and add it to
// the renderer from the main example.
vtkNew<vtkActor> edgeActor;
edgeActor->SetMapper(mapper);
renderer->AddActor(edgeActor);

  Displaying the input polyline.vtk as well shows that the white grid is generated by sweeping the red curve in a single direction.

code reading

  main function
  1. Use the vtkImageReader2 class and the vtkPolyDataReader class to read the files HeadMRVolume.mhd/HeadMRVolume.raw and polyline.vtk respectively; HeadMRVolume.mhd/HeadMRVolume.raw hold a head MR image volume, and polyline.vtk holds the geometric structure of a polyline.
  2. Use vtkSplineFilter to interpolate the polyline into a curve.
  3. Use the SweepLine method to sweep the curve from step 2 and generate a vtkPolyData surface object, the grid shown in the figure above.
  4. Set the two inputs of vtkProbeFilter: input 0 is the vtkPolyData surface object surface; input 1 is the data source imageReader->GetOutput().
  5. Set SetWindow and SetLevel of the vtkWindowLevelLookupTable mapping class according to the scalar range of imageReader->GetOutput().
  6. Set SetLookupTable and SetScalarRange(0, 255) on the vtkDataSetMapper.
  7. Enter the rendering pipeline.

  SweepLine method
  1. The number of rows of surface to generate is the number of points on the curve, line->GetNumberOfPoints(); the column spacing is obtained from distance and cols as spacing = distance / cols.
  2. Calculate the number of points inside surface, numberOfPoints = rows * cols, and the number of cells, (rows - 1) * (cols - 1), after cols has been incremented by one.
  3. Generate the vtkPoints object points and the vtkCellArray object polys.
  4. Traverse the rows and columns, generate the point coordinates, and insert them into points; every four adjacent points form one cell via InsertNextCell(4, pts).
  5. Set the surface object with SetPoints(points) and SetPolys(polys).
  6. Return the surface object. SweepLine sweeps the curve in one direction only; if it swept from the curve towards both sides, the result would be similar to a CPR, but the result would still need to be flattened onto a plane to obtain a 2D image (a sketch of a two-sided sweep is given at the end of this section).

  The vtkProbeFilter result can be converted into a vtkImageData result set: SweepLine uses the curve as the rows (Row) and extends the columns (Col) along direction, so the probed point data can be copied into a regular image index by index:

    vtkSmartPointer<vtkPolyData> final_poly = sampleVolume->GetPolyDataOutput();
    vtkSmartPointer<vtkImageData> final_image = vtkSmartPointer<vtkImageData>::New();
    final_image->SetDimensions(resolution + 1, resolution + 1, 1);
    final_image->AllocateScalars(VTK_DOUBLE, 1);
    final_image->SetSpacing(1, 1, 0);
    int *dims = final_image->GetDimensions();

    for (int y = 0; y < dims[1]; y++) {
        for (int x = 0; x < dims[0]; x++) {
            double *pixel = static_cast<double *>(final_image->GetScalarPointer(x, y, 0));
            double value = final_poly->GetPointData()->GetScalars()->GetTuple1(dims[0] * y + x);
            pixel[0] = value;
        }
    }
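
  To view the converted vtkImageData as a flat 2D image, a snippet along these lines could be appended. This is a sketch, not part of the original example: it reuses the final_image and wlLut objects from the code above, and vtkImageMapToColors / vtkImageActor would need to be included.

    // Sketch: map the double-valued image through the window/level LUT and
    // show it with an image actor in its own render window.
    vtkNew<vtkImageMapToColors> colorMap;
    colorMap->SetLookupTable(wlLut);
    colorMap->SetInputData(final_image);
    colorMap->Update();

    vtkNew<vtkImageActor> imageActor;
    imageActor->SetInputData(colorMap->GetOutput());

    vtkNew<vtkRenderer> imageRenderer;
    imageRenderer->AddActor(imageActor);
    vtkNew<vtkRenderWindow> imageWindow;
    imageWindow->AddRenderer(imageRenderer);
    imageWindow->SetWindowName("Reformatted image");
    imageWindow->Render();
    // (an interactor could be attached, as in the main example, to keep the window open)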

  The resulting image is skewed to the right;
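
  For the two-sided sweep mentioned in step 6 of the SweepLine walkthrough, a rough sketch (not part of the official example) could look like the following; it reuses spline, direction, distance and sampleVolume from the code above and assumes vtkAppendPolyData is included.

    // Sweep the curve along +direction and -direction and append the two
    // half-surfaces, so the probe surface extends to both sides of the curve.
    double negDirection[3] = {-direction[0], -direction[1], -direction[2]};
    auto forward = SweepLine(spline->GetOutput(), direction, distance, 100);
    auto backward = SweepLine(spline->GetOutput(), negDirection, distance, 100);

    vtkNew<vtkAppendPolyData> bothSides;
    bothSides->AddInputData(forward);
    bothSides->AddInputData(backward);
    bothSides->Update();

    // Probe the combined surface instead of the one-sided one.
    sampleVolume->SetInputData(0, bothSides->GetOutput());

  The appended surface simply contains both half-grids; flattening it into a single 2D image would require merging the two column index ranges, which is not shown here.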

Official website TissueLens example

#include <vtkSphere.h>
#include <vtkClipDataSet.h>
#include <vtkCylinder.h>
int main(int, char*[])
{
    vtkNew<vtkNamedColors> colors;

    std::array<unsigned char, 4> skinColor{{240, 184, 160, 255}};
    colors->SetColor("SkinColor", skinColor.data());
    std::array<unsigned char, 4> backColor{{255, 229, 200, 255}};
    colors->SetColor("BackfaceColor", backColor.data());
    std::array<unsigned char, 4> bkg{{51, 77, 102, 255}};
    colors->SetColor("BkgColor", bkg.data());

    // Read the volume data
    vtkNew<vtkMetaImageReader> reader;
    reader->SetFileName("G:\\Data\\vtk-examples-master\\src\\Testing\\Data\\FullHead.mhd");
    reader->Update();

    // An isosurface, or contour value of 500 is known to correspond to the
    // skin of the patient (this example raises the contour value to 1000).
#ifdef USE_FLYING_EDGES
    vtkNew<vtkFlyingEdges3D> skinExtractor;
#else
    vtkNew<vtkMarchingCubes> skinExtractor;
#endif
    skinExtractor->SetInputConnection(reader->GetOutputPort());
    skinExtractor->SetValue(0, 1000);

    // Define a spherical clip function to clip the isosurface
    vtkNew<vtkSphere> clipFunction;
    clipFunction->SetRadius(50);
    clipFunction->SetCenter(73, 52, 15);

    // Clip the isosurface with a sphere
    vtkNew<vtkClipDataSet> skinClip;
    skinClip->SetInputConnection(skinExtractor->GetOutputPort());
    skinClip->SetClipFunction(clipFunction);
    skinClip->SetValue(0);
    skinClip->GenerateClipScalarsOn();
    skinClip->Update();

    vtkNew<vtkDataSetMapper> skinMapper;
    skinMapper->SetInputConnection(skinClip->GetOutputPort());
    skinMapper->ScalarVisibilityOff();

    vtkNew<vtkActor> skin;
    skin->SetMapper(skinMapper);
    skin->GetProperty()->SetDiffuseColor(colors->GetColor3d("SkinColor").GetData());

    vtkNew<vtkProperty> backProp;
    backProp->SetDiffuseColor(colors->GetColor3d("BackfaceColor").GetData());
    skin->SetBackfaceProperty(backProp);

    // Define a model for the "lens". Its geometry matches the implicit
    // sphere used to clip the isosurface
    vtkNew<vtkSphereSource> lensModel;
    lensModel->SetRadius(50);
    lensModel->SetCenter(73, 52, 15);
    lensModel->SetPhiResolution(100);
    lensModel->SetThetaResolution(100);
   

    // Sample the input volume with the lens model geometry
    vtkNew<vtkProbeFilter> lensProbe;
    lensProbe->SetInputConnection(lensModel->GetOutputPort());
    lensProbe->SetSourceConnection(reader->GetOutputPort());

    // Clip the lens data with the isosurface value
    vtkNew<vtkClipDataSet> lensClip;
    lensClip->SetInputConnection(lensProbe->GetOutputPort());
    lensClip->SetValue(500);
    lensClip->GenerateClipScalarsOff();
    lensClip->Update();

    // Define a suitable grayscale lut
    vtkNew<vtkLookupTable> bwLut;
    bwLut->SetTableRange(-600, 2048);
    bwLut->SetSaturationRange(0, 0);
    bwLut->SetHueRange(0, 0);
    bwLut->SetValueRange(0, 1);
    bwLut->Build();

    vtkNew<vtkDataSetMapper> lensMapper;
    lensMapper->SetInputConnection(lensClip->GetOutputPort());
    lensMapper->SetScalarRange(lensClip->GetOutput()->GetScalarRange());
    lensMapper->SetLookupTable(bwLut);

    vtkNew<vtkActor> lens;
    lens->SetMapper(lensMapper);

    // It is convenient to create an initial view of the data. The FocalPoint
    // and Position form a vector direction. Later on (ResetCamera() method)
    // this vector is used to position the camera to look at the data in
    // this direction.
    vtkNew<vtkCamera> aCamera;
    aCamera->SetViewUp(0, 0, -1);
    aCamera->SetPosition(0, -1, 0);
    aCamera->SetFocalPoint(0, 0, 0);
    aCamera->ComputeViewPlaneNormal();
    aCamera->Azimuth(30.0);
    aCamera->Elevation(30.0);

    // Create the renderer, the render window, and the interactor. The renderer
    // draws into the render window, the interactor enables mouse- and
    // keyboard-based interaction with the data within the render window.
    //
    vtkNew<vtkRenderer> aRenderer;
    vtkNew<vtkRenderWindow> renWin;
    renWin->AddRenderer(aRenderer);

    vtkNew<vtkRenderWindowInteractor> iren;
    iren->SetRenderWindow(renWin);

    // Actors are added to the renderer. An initial camera view is created.
    // The Dolly() method moves the camera towards the FocalPoint,
    // thereby enlarging the image.
    aRenderer->AddActor(lens);
    aRenderer->AddActor(skin);
    aRenderer->SetActiveCamera(aCamera);
    aRenderer->ResetCamera();
    aCamera->Dolly(1.5);

    // Set a background color for the renderer and set the size of the
    // render window (expressed in pixels).
    aRenderer->SetBackground(colors->GetColor3d("BkgColor").GetData());
    renWin->SetSize(640, 480);
    renWin->SetWindowName("TissueLens");

    // Note that when camera movement occurs (as it does in the Dolly()
    // method), the clipping planes often need adjusting. Clipping planes
    // consist of two planes: near and far along the view direction. The
    // near plane clips out objects in front of the plane; the far plane
    // clips out objects behind the plane. This way only what is drawn
    // between the planes is actually rendered.
    aRenderer->ResetCameraClippingRange();

    // Initialize the event loop and then start it.
    renWin->Render();
    iren->Initialize();
    iren->Start();

    return EXIT_SUCCESS;
}

code reading

  When lensClip's SetValue is set to 0, the complete probed hemisphere is displayed in grayscale; when it is set to 500, the regions whose probed value is lower than 500 are clipped away.
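
  The only difference between the two settings described above is the clip value passed to lensClip (a small sketch using the lensClip object from the example):

    lensClip->SetValue(0);      // keep the whole probed hemisphere
    // lensClip->SetValue(500); // clip away regions where the probed scalar is below 500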

  When a plane is built from points and cells instead of the sphere (as in the modified example below), probing produces a planar grayscale image combined with the polygonal rendering; vtkClipDataSet is used to constrain the lower limit of the scalar (CT) values.

#include <vtkSphere.h>
#include <vtkClipDataSet.h>
#include <vtkCylinder.h>
int main(int, char*[])
{
    vtkNew<vtkNamedColors> colors;

    std::array<unsigned char, 4> skinColor{{240, 184, 160, 255}};
    colors->SetColor("SkinColor", skinColor.data());
    std::array<unsigned char, 4> backColor{{255, 229, 200, 255}};
    colors->SetColor("BackfaceColor", backColor.data());
    std::array<unsigned char, 4> bkg{{51, 77, 102, 255}};
    colors->SetColor("BkgColor", bkg.data());

    // Read the volume data
    vtkNew<vtkMetaImageReader> reader;
    reader->SetFileName("G:\\Data\\vtk-examples-master\\src\\Testing\\Data\\FullHead.mhd");
    reader->Update();

    // An isosurface, or contour value of 500 is known to correspond to the
    // skin of the patient (this example raises the contour value to 1000).
#ifdef USE_FLYING_EDGES
    vtkNew<vtkFlyingEdges3D> skinExtractor;
#else
    vtkNew<vtkMarchingCubes> skinExtractor;
#endif
    skinExtractor->SetInputConnection(reader->GetOutputPort());
    skinExtractor->SetValue(0, 1000);

    // Define a spherical clip function to clip the isosurface
    vtkNew<vtkSphere> clipFunction;
    clipFunction->SetRadius(50);
    clipFunction->SetCenter(73, 52, 15);

    // Clip the isosurface with a sphere
    vtkNew<vtkClipDataSet> skinClip;
    skinClip->SetInputConnection(skinExtractor->GetOutputPort());
    skinClip->SetClipFunction(clipFunction);
    skinClip->SetValue(0);
    skinClip->GenerateClipScalarsOn();
    skinClip->Update();

    vtkNew<vtkDataSetMapper> skinMapper;
    skinMapper->SetInputConnection(skinClip->GetOutputPort());
    skinMapper->ScalarVisibilityOff();

    vtkNew<vtkActor> skin;
    skin->SetMapper(skinMapper);
    skin->GetProperty()->SetDiffuseColor(
        colors->GetColor3d("SkinColor").GetData());

    vtkNew<vtkProperty> backProp;
    backProp->SetDiffuseColor(colors->GetColor3d("BackfaceColor").GetData());
    skin->SetBackfaceProperty(backProp);

    vtkSmartPointer<vtkPoints> gridPoints = vtkSmartPointer<vtkPoints>::New();
    vtkNew<vtkCellArray> polys;
    for (unsigned int x = 0; x < 200; x++)
    {
        for (unsigned int y = 0; y < 200; y++)
        {
            gridPoints->InsertNextPoint(30 + x, 30 + y, 15 + 0);
        }
    }

    vtkIdType pts[4];
    for (unsigned int row = 0; row < 200 - 1; row++) {
        for (unsigned int col = 0; col < 200 - 1; col++) {
            pts[0] = col + row * (200);
            pts[1] = pts[0] + 1;
            pts[2] = pts[0] + 200 + 1;
            pts[3] = pts[0] + 200;
            polys->InsertNextCell(4, pts);
        }
    }
   
    // Create a dataset from the grid points
    vtkSmartPointer<vtkPolyData> gridPolyData = vtkSmartPointer<vtkPolyData>::New();
    gridPolyData->SetPoints(gridPoints);
    gridPolyData->SetPolys(polys);

    // Sample the input volume with the lens model geometry
    vtkNew<vtkProbeFilter> lensProbe;
    lensProbe->SetInputData(gridPolyData);
    lensProbe->SetSourceConnection(reader->GetOutputPort());

    // Clip the lens data with the isosurface value
    vtkNew<vtkClipDataSet> lensClip;
    lensClip->SetInputConnection(lensProbe->GetOutputPort());
    lensClip->SetValue(500);
    lensClip->GenerateClipScalarsOff();
    lensClip->Update();

    // Define a suitable grayscale lut
    vtkNew<vtkLookupTable> bwLut;
    bwLut->SetTableRange(0, 2048);
    bwLut->SetSaturationRange(0, 0);
    bwLut->SetHueRange(0, 0);
    bwLut->SetValueRange(0, 1);
    bwLut->Build();

    vtkNew<vtkDataSetMapper> lensMapper;
    lensMapper->SetInputConnection(lensClip->GetOutputPort());
    lensMapper->SetScalarRange(lensClip->GetOutput()->GetScalarRange());
    lensMapper->SetLookupTable(bwLut);

    vtkNew<vtkActor> lens;
    lens->SetMapper(lensMapper);

    // It is convenient to create an initial view of the data. The FocalPoint
    // and Position form a vector direction. Later on (ResetCamera() method)
    // this vector is used to position the camera to look at the data in
    // this direction.
    vtkNew<vtkCamera> aCamera;
    aCamera->SetViewUp(0, 0, -1);
    aCamera->SetPosition(0, -1, 0);
    aCamera->SetFocalPoint(0, 0, 0);
    aCamera->ComputeViewPlaneNormal();
    aCamera->Azimuth(30.0);
    aCamera->Elevation(30.0);

    // Create the renderer, the render window, and the interactor. The renderer
    // draws into the render window, the interactor enables mouse- and
    // keyboard-based interaction with the data within the render window.
    //
    vtkNew<vtkRenderer> aRenderer;
    vtkNew<vtkRenderWindow> renWin;
    renWin->AddRenderer(aRenderer);

    vtkNew<vtkRenderWindowInteractor> iren;
    iren->SetRenderWindow(renWin);

    // Actors are added to the renderer. An initial camera view is created.
    // The Dolly() method moves the camera towards the FocalPoint,
    // thereby enlarging the image.
    aRenderer->AddActor(lens);
    aRenderer->AddActor(skin);
    aRenderer->SetActiveCamera(aCamera);
    aRenderer->ResetCamera();
    aCamera->Dolly(1.5);

    // Set a background color for the renderer and set the size of the
    // render window (expressed in pixels).
    aRenderer->SetBackground(colors->GetColor3d("BkgColor").GetData());
    renWin->SetSize(640, 480);
    renWin->SetWindowName("TissueLens");

    // Note that when camera movement occurs (as it does in the Dolly()
    // method), the clipping planes often need adjusting. Clipping planes
    // consist of two planes: near and far along the view direction. The
    // near plane clips out objects in front of the plane; the far plane
    // clips out objects behind the plane. This way only what is drawn
    // between the planes is actually rendered.
    aRenderer->ResetCameraClippingRange();

    // Initialize the event loop and then start it.
    renWin->Render();
    iren->Initialize();
    iren->Start();

    return EXIT_SUCCESS;
}


References

1. vtk two-dimensional reconstruction slice II
2. CurvedReformation
3. vtkProbeFilter Class Reference
4. TissueLens
