template<class TInputImage, class TOutputImage>
class itk::AntiAliasBinaryImageFilter< TInputImage, TOutputImage >
A method for estimation of a surface from a binary volume.
- This filter implements a surface-fitting method for estimation of a surface from a binary volume. This process can be used to reduce the aliasing artifacts that result from visualizing binary partitioned surfaces.
- The binary volume (filter input) is used as a set of constraints in an iterative relaxation process of an estimated ND surface. The surface is described implicitly as the zero level set of a volume u and allowed to deform under curvature flow. A set of constraints is imposed on this movement as follows:

u^{n+1}_{i,j,k} = \max( u^{n}_{i,j,k} + \Delta t \, H^{n}_{i,j,k}, \; 0 ) \quad \text{if } B_{i,j,k} = 1
u^{n+1}_{i,j,k} = \min( u^{n}_{i,j,k} + \Delta t \, H^{n}_{i,j,k}, \; 0 ) \quad \text{if } B_{i,j,k} = -1

- where u^{n}_{i,j,k} is the value of u at discrete index (i,j,k) and iteration n, H is the gradient magnitude times mean curvature of u, and B is the binary input volume, with 1 denoting an inside pixel and -1 denoting an outside pixel.
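- For example, at a voxel the input labels as inside (B_{i,j,k} = 1), if the unconstrained curvature-flow update u^{n}_{i,j,k} + \Delta t H^{n}_{i,j,k} would be negative, the max(..., 0) constraint clamps the new value to zero; the min(..., 0) constraint acts symmetrically at outside voxels. The surface can therefore relax freely under curvature flow, but its zero level set never crosses to the wrong side of any voxel of the binary input.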
- NOTES
- This implementation uses a sparse field level set solver instead of the narrow band implementation described in the reference below, which may introduce some differences in how fast and how accurately (in terms of RMS error) the solution converges.
- REFERENCES
- Whitaker, Ross. "Reducing Aliasing Artifacts In Iso-Surfaces of Binary Volumes," IEEE Volume Visualization and Graphics Symposium, October 2000, pp. 23-32.
- PARAMETERS
- The MaximumRMSChange parameter is used to determine when the solution has converged. A lower value will result in a tighter-fitting solution, but will require more computations. Too low a value could put the solver into an infinite loop. Values should always be less than 1.0. A value of 0.07 is a good starting estimate.
- The MaximumIterations parameter can be used to halt the solution after a specified number of iterations.
- INPUT
- The input is an N-dimensional image of any type. It is assumed to be a binary image. The filter will use an isosurface value that is halfway between the min and max values in the image. A signed data type is not necessary for the input.
- OUTPUT
- The filter will output a level set image of real, signed values. The zero crossings of this (N-dimensional) image represent the position of the isosurface value of interest. Values outside the zero level set are negative and values inside the zero level set are positive.
- IMPORTANT!
- The output image type you use to instantiate this filter should be a real-valued scalar type, i.e. float or double.
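- For example (a minimal sketch, not part of the class documentation; the pixel types, the image dimension, the threshold settings, and the helper name ExtractInsideMask are illustrative assumptions), a smoothed binary mask can be recovered from the level set output by thresholding it at zero with itk::BinaryThresholdImageFilter, keeping the non-negative (inside) values per the sign convention above:

#include "itkImage.h"
#include "itkBinaryThresholdImageFilter.h"
#include "itkNumericTraits.h"

using LevelSetImageType = itk::Image<float, 3>;         // real-valued filter output
using MaskImageType     = itk::Image<unsigned char, 3>; // smoothed binary result

// Threshold the anti-aliased level set at zero: non-negative values lie
// inside the zero level set, negative values lie outside.
MaskImageType::Pointer
ExtractInsideMask(const LevelSetImageType * levelSet)
{
  using ThresholdType =
    itk::BinaryThresholdImageFilter<LevelSetImageType, MaskImageType>;
  auto thresholder = ThresholdType::New();
  thresholder->SetInput(levelSet);
  thresholder->SetLowerThreshold(0.0f);                              // zero crossing marks the surface
  thresholder->SetUpperThreshold(itk::NumericTraits<float>::max());
  thresholder->SetInsideValue(255);                                  // inside the zero level set
  thresholder->SetOutsideValue(0);                                   // outside the zero level set
  thresholder->Update();
  return thresholder->GetOutput();
}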
- USING THIS FILTER
- The filter is relatively straightforward to use. Tests and examples exist to illustrate its use. The important thing is to understand the input and output types so you can properly interpret your results.
- In the common case, the only parameter that will need to be set is the MaximumRMSChange parameter, which determines when the solver halts.
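- A minimal usage sketch is shown below (not taken from the toolkit's tests or examples; the file names, the unsigned char input pixel type, and the parameter values are illustrative assumptions). It uses SetMaximumRMSError() and SetNumberOfIterations(), the methods inherited from itk::FiniteDifferenceImageFilter that correspond to the MaximumRMSChange and MaximumIterations parameters described above:

#include "itkImage.h"
#include "itkImageFileReader.h"
#include "itkImageFileWriter.h"
#include "itkAntiAliasBinaryImageFilter.h"

#include <cstdlib>
#include <iostream>

int main()
{
  constexpr unsigned int Dimension = 3;
  using InputImageType  = itk::Image<unsigned char, Dimension>; // binary input volume
  using OutputImageType = itk::Image<float, Dimension>;         // real-valued level set output

  // Read the binary volume (hypothetical file name).
  using ReaderType = itk::ImageFileReader<InputImageType>;
  auto reader = ReaderType::New();
  reader->SetFileName("binaryMask.mha");

  // Relax the implicit surface under the binary constraints.
  using AntiAliasType =
    itk::AntiAliasBinaryImageFilter<InputImageType, OutputImageType>;
  auto antiAlias = AntiAliasType::New();
  antiAlias->SetInput(reader->GetOutput());
  antiAlias->SetMaximumRMSError(0.07);   // MaximumRMSChange: convergence threshold
  antiAlias->SetNumberOfIterations(100); // MaximumIterations: safety cap

  // Write the signed level set image (hypothetical file name).
  using WriterType = itk::ImageFileWriter<OutputImageType>;
  auto writer = WriterType::New();
  writer->SetInput(antiAlias->GetOutput());
  writer->SetFileName("antiAliasedLevelSet.mha");

  try
  {
    writer->Update();
  }
  catch (const itk::ExceptionObject & err)
  {
    std::cerr << err << std::endl;
    return EXIT_FAILURE;
  }
  return EXIT_SUCCESS;
}

The written image can then be visualized by extracting the isosurface at value 0, or rethresholded at zero as sketched earlier to recover a smoothed binary mask.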
Definition at line 107 of file itkAntiAliasBinaryImageFilter.h.
| AntiAliasBinaryImageFilter ()
virtual ValueType | CalculateUpdateValue (const IndexType &idx, const TimeStepType &dt, const ValueType &value, const ValueType &change)
void | GenerateData ()
virtual void | PrintSelf (std::ostream &os, Indent indent) const
| ~AntiAliasBinaryImageFilter ()
void | AllocateUpdateBuffer ()
void | ApplyUpdate (const TimeStepType &dt)
TimeStepType | CalculateChange ()
void | ConstructActiveLayer ()
void | ConstructLayer (StatusType from, StatusType to)
void | CopyInputToOutput ()
virtual ValueType | GetValueOne () const
virtual ValueType | GetValueZero () const
void | Initialize ()
void | InitializeActiveLayerValues ()
virtual void | InitializeBackgroundPixels ()
virtual void | PostProcessOutput ()
void | ProcessOutsideList (LayerType *OutsideList, StatusType ChangeToStatus)
void | ProcessStatusList (LayerType *InputList, LayerType *OutputList, StatusType ChangeToStatus, StatusType SearchForStatus)
void | PropagateAllLayerValues ()
void | PropagateLayerValues (StatusType from, StatusType to, StatusType promote, int InOrOut)
| SparseFieldLevelSetImageFilter ()
void | UpdateActiveLayerValues (TimeStepType dt, LayerType *StatusUpList, LayerType *StatusDownList)
| ~SparseFieldLevelSetImageFilter ()
| FiniteDifferenceImageFilter ()
virtual void | GenerateInputRequestedRegion ()
virtual bool | Halt ()
virtual void | InitializeIteration ()
virtual TimeStepType | ResolveTimeStep (const std::vector< TimeStepType > &timeStepList, const std::vector< bool > &valid) const
virtual void | SetElapsedIterations (IdentifierType _arg)
virtual bool | ThreadedHalt (void *)
virtual | ~FiniteDifferenceImageFilter ()
virtual void | AllocateOutputs ()
virtual bool | GetRunningInPlace () const
| InPlaceImageFilter ()
virtual void | ReleaseInputs ()
| ~InPlaceImageFilter ()
virtual void | CallCopyInputRegionToOutputRegion (OutputImageRegionType &destRegion, const InputImageRegionType &srcRegion)
virtual void | CallCopyOutputRegionToInputRegion (InputImageRegionType &destRegion, const OutputImageRegionType &srcRegion)
virtual void | VerifyInputInformation ()
void | PushBackInput (const DataObject *input)
void | PushFrontInput (const DataObject *input)
| ImageToImageFilter ()
| ~ImageToImageFilter ()
virtual void | AfterThreadedGenerateData ()
virtual void | BeforeThreadedGenerateData ()
| ImageSource ()
virtual unsigned int | SplitRequestedRegion (unsigned int i, unsigned int num, OutputImageRegionType &splitRegion)
virtual void | ThreadedGenerateData (const OutputImageRegionType &outputRegionForThread, ThreadIdType threadId)
virtual | ~ImageSource ()
virtual void | AddInput (DataObject *input)
virtual void | AddOutput (DataObject *output)
bool | AddRequiredInputName (const DataObjectIdentifierType &)
virtual void | CacheInputReleaseDataFlags ()
virtual void | GenerateOutputInformation ()
virtual void | GenerateOutputRequestedRegion (DataObject *output)
virtual const DataObjectPointerArraySizeType & | GetNumberOfRequiredOutputs ()
bool | IsIndexedInputName (const DataObjectIdentifierType &) const
bool | IsIndexedOutputName (const DataObjectIdentifierType &) const
bool | IsRequiredInputName (const DataObjectIdentifierType &) const
| itkLegacyMacro (virtual void RemoveInput(DataObject *input))
| itkLegacyMacro (virtual void RemoveOutput(DataObject *output))
| itkLegacyMacro (void SetNumberOfInputs(DataObjectPointerArraySizeType num))
| itkLegacyMacro (void SetNumberOfOutputs(DataObjectPointerArraySizeType num))
DataObjectPointerArraySizeType | MakeIndexFromInputName (const DataObjectIdentifierType &name) const
DataObjectPointerArraySizeType | MakeIndexFromOutputName (const DataObjectIdentifierType &name) const
DataObjectIdentifierType | MakeNameFromInputIndex (DataObjectPointerArraySizeType idx) const
DataObjectIdentifierType | MakeNameFromOutputIndex (DataObjectPointerArraySizeType idx) const
| ProcessObject ()
virtual void | PropagateResetPipeline ()
virtual void | RemoveInput (const DataObjectIdentifierType &key)
virtual void | RemoveInput (DataObjectPointerArraySizeType)
virtual void | RemoveOutput (const DataObjectIdentifierType &key)
virtual void | RemoveOutput (DataObjectPointerArraySizeType idx)
bool | RemoveRequiredInputName (const DataObjectIdentifierType &)
virtual void | RestoreInputReleaseDataFlags ()
virtual void | SetInput (const DataObjectIdentifierType &key, DataObject *input)
virtual void | SetNthInput (DataObjectPointerArraySizeType num, DataObject *input)
virtual void | SetNthOutput (DataObjectPointerArraySizeType num, DataObject *output)
void | SetNumberOfIndexedInputs (DataObjectPointerArraySizeType num)
void | SetNumberOfIndexedOutputs (DataObjectPointerArraySizeType num)
virtual void | SetNumberOfRequiredOutputs (DataObjectPointerArraySizeType _arg)
virtual void | SetOutput (const DataObjectIdentifierType &key, DataObject *output)
virtual void | SetPrimaryInput (DataObject *input)
virtual void | SetPrimaryOutput (DataObject *output)
void | SetRequiredInputNames (const NameArray &)
virtual void | VerifyPreconditions ()
| ~ProcessObject ()
DataObject * | GetInput (const DataObjectIdentifierType &key)
const DataObject * | GetInput (const DataObjectIdentifierType &key) const
DataObject * | GetInput (DataObjectPointerArraySizeType)
const DataObject * | GetInput (DataObjectPointerArraySizeType idx) const
DataObject * | GetPrimaryInput ()
const DataObject * | GetPrimaryInput () const
virtual void | SetPrimaryInputName (const DataObjectIdentifierType &key)
virtual const char * | GetPrimaryInputName () const
DataObject * | GetOutput (const DataObjectIdentifierType &key)
const DataObject * | GetOutput (const DataObjectIdentifierType &key) const
virtual void | SetPrimaryOutputName (const DataObjectIdentifierType &key)
virtual const char * | GetPrimaryOutputName () const
DataObject * | GetOutput (DataObjectPointerArraySizeType idx)
const DataObject * | GetOutput (DataObjectPointerArraySizeType idx) const
DataObject * | GetPrimaryOutput ()
const DataObject * | GetPrimaryOutput () const
virtual void | SetNumberOfRequiredInputs (DataObjectPointerArraySizeType)
virtual const DataObjectPointerArraySizeType & | GetNumberOfRequiredInputs ()
| Object ()
bool | PrintObservers (std::ostream &os, Indent indent) const
virtual void | SetTimeStamp (const TimeStamp &time)
virtual | ~Object ()
virtual LightObject::Pointer | InternalClone () const
| LightObject ()
virtual void | PrintHeader (std::ostream &os, Indent indent) const
virtual void | PrintTrailer (std::ostream &os, Indent indent) const
virtual | ~LightObject ()