final byte[] lut = new byte[256];
// clamp before the cast to avoid byte wrap-around; start at 0 so the offset
// is applied to zero-valued pixels as well
for (int i = 0; i < lut.length; i++)
lut[i] = (byte) Math.max(0, Math.min(255, scale * i + offset + 0.5d));
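// For example, with hypothetical values scale = 2.0 and offset = 16.0, an input
// pixel of 100 is remapped to lut[100] = (byte) (2.0 * 100 + 16.0 + 0.5) = (byte) 216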
//do the actual lookup
final LookupTableJAI lookup = new LookupTableJAI(lut);
final ParameterBlock pb = new ParameterBlock();
pb.addSource(inputImage);
pb.add(lookup);
return JAI.create("lookup", pb, hints);
}
////
//
// General case: we use the rescale operation to stretch the values to the full range of the data type
//
////
//get the full range for this data type
final double maximum = ColorUtilities.getMaximum(dataType);
final double minimum = ColorUtilities.getMinimum(dataType);
//the image already spans the full range, nothing to do
if (extrema[1][0] == maximum && extrema[0][0] == minimum)
return inputImage;
//compute the scale factors for the linear stretch
final double delta = extrema[1][0] - extrema[0][0];
// guard against constant images, which would produce an infinite scale
if (delta == 0)
return inputImage;
final double scale = (maximum - minimum) / delta;
final double offset = minimum - scale * extrema[0][0];
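// Worked example with hypothetical values: for TYPE_USHORT data (range [0, 65535])
// and extrema [200, 10000], scale = 65535 / 9800 ~ 6.687 and offset ~ -1337.4,
// so that 200 -> 0 and 10000 -> 65535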
//do the actual rescale
final ParameterBlock pb = new ParameterBlock();
pb.addSource(inputImage);
pb.add(new double[] { scale });
pb.add(new double[] { offset });
return JAI.create("rescale", pb, hints);
}
// /////////////////////////////////////////////////////////////////////
//
// EXPONENTIAL Normalization
//
// /////////////////////////////////////////////////////////////////////
if (type.equalsIgnoreCase("EXPONENTIAL")) {
if (dataType == DataBuffer.TYPE_BYTE) {
////
//
// Optimisation for byte images: we use a lookup table
//
////
final byte[] lut = new byte[256];
final double normalizationFactor = 255.0;
final double correctionFactor = 255.0 / (Math.E - 1);
// lut[0] can be skipped since exp(0) - 1 == 0
for (int i = 1; i < lut.length; i++)
lut[i] = (byte) (0.5f + correctionFactor * (Math.exp(i / normalizationFactor) - 1.0));
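// Sanity check on the mapping: i = 255 yields 255 / (e - 1) * (exp(255 / 255) - 1) = 255,
// so the transform maps the byte range [0, 255] onto itself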
return LookupDescriptor.create(inputImage,
new LookupTableJAI(lut), hints);
}
////
//
// General case: we use the generic piecewise 1D transform
//
////
////
//
// STEP 1: compute the extrema to get the statistics for this image
//
////
final RenderedOp statistics = ExtremaDescriptor.create(inputImage,
null, Integer.valueOf(1), Integer.valueOf(1), null,
Integer.valueOf(1), null);
final double[] minimum = (double[]) statistics.getProperty("minimum");
final double[] maximum = (double[]) statistics.getProperty("maximum");
final double normalizationFactor = maximum[0];
final double correctionFactor = normalizationFactor / (Math.E - 1);
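// With correctionFactor = max / (e - 1), the transform below maps the image
// maximum onto itself: correctionFactor * (exp(max / max) - 1) = max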
////
//
// STEP 2 apply the exponential transform using the generic piecewise operation
//
////
final DefaultPiecewiseTransform1DElement mainElement = DefaultPiecewiseTransform1DElement.create(
"exponential-contrast-enhancement-transform", NumberRange.create(minimum[0],maximum[0]),
new MathTransform1DAdapter() {
/*
* (non-Javadoc)
* @see org.opengis.referencing.operation.MathTransform1D#derivative(double)
*/
public double derivative(double value)
throws TransformException {
// the derivative is never needed by this operation
throw new UnsupportedOperationException(Errors.format(ErrorKeys.UNSUPPORTED_OPERATION_$1, "derivative"));
}
public boolean isIdentity() {
return false;
}
/*
* (non-Javadoc)
* @see org.opengis.referencing.operation.MathTransform1D#transform(double)
*/
public double transform(double value)
throws TransformException {
return correctionFactor * (Math.exp(value / normalizationFactor) - 1);
}
});
final PiecewiseTransform1D<DefaultPiecewiseTransform1DElement> transform = new DefaultPiecewiseTransform1D<DefaultPiecewiseTransform1DElement>(
new DefaultPiecewiseTransform1DElement[] { mainElement }, 0);
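// note: the trailing 0 is the default value, returned for input values that
// fall outside the [minimum, maximum] range of the single element above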
final ParameterBlockJAI pbj = new ParameterBlockJAI(
GenericPiecewise.OPERATION_NAME);
pbj.addSource(inputImage);
pbj.setParameter("Domain1D", transform);
pbj.setParameter("bandIndex", Integer.valueOf(0));
// pass the rendering hints along, as the other code paths do
return JAI.create(GenericPiecewise.OPERATION_NAME, pbj, hints);
}
if (type.equalsIgnoreCase("LOGARITHMIC")) {
// /////////////////////////////////////////////////////////////////////
//
// LOGARITHMIC Normalization
//
// /////////////////////////////////////////////////////////////////////
if (dataType == DataBuffer.TYPE_BYTE) {
////
//
// Optimisation for byte images: we use a lookup table
//
////
final byte[] lut = new byte[256];
final double normalizationFactor = 255.0;
final double correctionFactor = 100.0;
// lut[0] can be skipped since log(1) == 0; dividing by log(correctionFactor + 1)
// keeps the result within the byte range (otherwise 255 * log(101) ~ 1177
// would overflow the cast)
for (int i = 1; i < lut.length; i++)
lut[i] = (byte) (0.5f + normalizationFactor * Math.log(i * correctionFactor / normalizationFactor + 1.0) / Math.log(correctionFactor + 1.0));
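// Sanity check on the mapping: i = 255 yields 255 * log(101) / log(101) = 255, while
// mid-tones are boosted, e.g. i = 64 maps to roughly 255 * log(26.1) / log(101) ~ 180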
return LookupDescriptor.create(inputImage,
new LookupTableJAI(lut), hints);
}
////
//
// General case
//
////
//define a specific piecewise for the logarithm
////
//
// STEP 1: compute the extrema to get the statistics for this image
//
////
final RenderedOp statistics = ExtremaDescriptor.create(inputImage,
null, Integer.valueOf(1), Integer.valueOf(1), null,
Integer.valueOf(1), null);
final double[] minimum = (double[]) statistics.getProperty("minimum");
final double[] maximum = (double[]) statistics.getProperty("maximum");
final double normalizationFactor = maximum[0];
final double correctionFactor = 100.0;
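// Note: the transform below assumes non-negative pixel values; a negative enough
// minimum would drive the argument of Math.log to zero or below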
////
//
// STEP 2 apply the logarithmic transform using the generic piecewise operation
//
////
final DefaultPiecewiseTransform1DElement mainElement = DefaultPiecewiseTransform1DElement.create(
"logarithmic-contrast-enhancement-transform", NumberRange.create(minimum[0],maximum[0]),
new MathTransform1DAdapter() {
/*
* (non-Javadoc)
* @see org.opengis.referencing.operation.MathTransform1D#derivative(double)
*/
public double derivative(double value)
throws TransformException {
// the derivative is never needed by this operation
throw new UnsupportedOperationException(Errors.format(ErrorKeys.UNSUPPORTED_OPERATION_$1, "derivative"));
}
public boolean isIdentity() {
return false;
}
/*
* (non-Javadoc)
* @see org.opengis.referencing.operation.MathTransform1D#transform(double)
*/
public double transform(double value)
throws TransformException {
// divide by log(1 + correctionFactor) so that the image maximum maps onto itself
return normalizationFactor * Math.log(1 + value * correctionFactor / normalizationFactor) / Math.log(1 + correctionFactor);
}
});
final PiecewiseTransform1D<DefaultPiecewiseTransform1DElement> transform = new DefaultPiecewiseTransform1D<DefaultPiecewiseTransform1DElement>(
new DefaultPiecewiseTransform1DElement[] { mainElement }, 0);
final ParameterBlockJAI pbj = new ParameterBlockJAI(
GenericPiecewise.OPERATION_NAME);
pbj.addSource(inputImage);
pbj.setParameter("Domain1D", transform);
pbj.setParameter("bandIndex", Integer.valueOf(0));
return JAI.create(GenericPiecewise.OPERATION_NAME, pbj, hints);
}
if (type.equalsIgnoreCase("HISTOGRAM")) {
// /////////////////////////////////////////////////////////////////////
//
// HISTOGRAM Equalization
//
// NOTE: this operation works only on the byte data type, hence the
// conversion to bytes below
//
// /////////////////////////////////////////////////////////////////////
//convert the input image to 8 bits
inputImage = new ImageWorker(inputImage).rescaleToBytes().getRenderedImage();
// compute the histogram
final RenderedOp hist = HistogramDescriptor.create(inputImage,
null, Integer.valueOf(1), Integer.valueOf(1),
new int[] { 256 }, new double[] { 0 },
new double[] { 256 }, null);
final Histogram h = (Histogram) hist.getProperty("histogram");
// now compute the CDF for the original image
final byte[] cumulative = new byte[h.getNumBins(0)];
// total number of samples; some values might have been excluded from the
// histogram, so we cannot simply use width * height
float totalBinSum = 0;
for (int i = 0; i < cumulative.length; i++) {
totalBinSum += h.getBinSize(0, i);
}
// this is the scale factor for the histogram equalization process
final float scale = (float) (h.getHighValue(0) - 1 - h.getLowValue(0)) / totalBinSum;
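// Worked example with hypothetical values: for a 100x100 byte image
// (totalBinSum = 10000) with lowValue = 0 and highValue = 256, scale = 255 / 10000;
// a bin with cumulative count 5000 then maps to (byte) (5000 * 0.0255 + 0 + 0.5) = 128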
float sum = 0;
for (int i = 1; i < cumulative.length; i++) {
sum += h.getBinSize(0, i - 1);
cumulative[i] = (byte) ((sum * scale + h.getLowValue(0)) + .5F);
}
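// cumulative[] now holds the equalization LUT: cumulative[i] is the CDF of all
// bins below i, rescaled to [lowValue, highValue - 1]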
final LookupTableJAI lookup = new LookupTableJAI(cumulative);
final ParameterBlock pb = new ParameterBlock();
// the "histogram" operation passes pixels through unchanged, so the lookup
// can be chained directly on top of it
pb.addSource(hist);
pb.add(lookup);
return JAI.create("lookup", pb, hints);
}