0001: /*
0002: * Licensed to the Apache Software Foundation (ASF) under one or more
0003: * contributor license agreements. See the NOTICE file distributed with
0004: * this work for additional information regarding copyright ownership.
0005: * The ASF licenses this file to You under the Apache License, Version 2.0
0006: * (the "License"); you may not use this file except in compliance with
0007: * the License. You may obtain a copy of the License at
0008: *
0009: * http://www.apache.org/licenses/LICENSE-2.0
0010: *
0011: * Unless required by applicable law or agreed to in writing, software
0012: * distributed under the License is distributed on an "AS IS" BASIS,
0013: * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0014: * See the License for the specific language governing permissions and
0015: * limitations under the License.
0016: */
0017:
0018: /* $Id: TIFFImage.java 496556 2007-01-16 00:59:48Z cam $ */
0019:
0020: package org.apache.xmlgraphics.image.codec.tiff;
0021:
0022: import java.awt.Rectangle;
0023: import java.awt.Transparency;
0024: import java.awt.color.ColorSpace;
0025: import java.awt.image.ColorModel;
0026: import java.awt.image.ComponentColorModel;
0027: import java.awt.image.DataBuffer;
0028: import java.awt.image.DataBufferByte;
0029: import java.awt.image.DataBufferInt;
0030: import java.awt.image.DataBufferShort;
0031: import java.awt.image.DataBufferUShort;
0032: import java.awt.image.IndexColorModel;
0033: import java.awt.image.MultiPixelPackedSampleModel;
0034: import java.awt.image.PixelInterleavedSampleModel;
0035: import java.awt.image.Raster;
0036: import java.awt.image.SampleModel;
0037: import java.awt.image.WritableRaster;
0038: import java.io.ByteArrayInputStream;
0039: import java.io.IOException;
0040: import java.util.HashMap;
0041: import java.util.Map;
0042: import java.util.zip.DataFormatException;
0043: import java.util.zip.Inflater;
0044:
0045: import org.apache.xmlgraphics.image.codec.util.SeekableStream;
0046: import org.apache.xmlgraphics.image.rendered.AbstractRed;
0047: import org.apache.xmlgraphics.image.rendered.CachableRed;
0048:
0049: import com.sun.image.codec.jpeg.JPEGCodec;
0050: import com.sun.image.codec.jpeg.JPEGDecodeParam;
0051: import com.sun.image.codec.jpeg.JPEGImageDecoder;
0052:
0053: public class TIFFImage extends AbstractRed {
0054:
    // Compression types: values of the TIFF Compression tag recognized
    // by this decoder (baseline 1-7 plus PackBits and Deflate).
    public static final int COMP_NONE = 1;
    public static final int COMP_FAX_G3_1D = 2;
    public static final int COMP_FAX_G3_2D = 3;
    public static final int COMP_FAX_G4_2D = 4;
    public static final int COMP_LZW = 5;
    public static final int COMP_JPEG_OLD = 6;
    public static final int COMP_JPEG_TTN2 = 7;
    public static final int COMP_PACKBITS = 32773;
    public static final int COMP_DEFLATE = 32946;

    // Image types: internal classification derived in the constructor from
    // the photometric interpretation, sample size and samples per pixel.
    private static final int TYPE_UNSUPPORTED = -1;
    private static final int TYPE_BILEVEL = 0;
    private static final int TYPE_GRAY_4BIT = 1;
    private static final int TYPE_GRAY = 2;
    private static final int TYPE_GRAY_ALPHA = 3;
    private static final int TYPE_PALETTE = 4;
    private static final int TYPE_RGB = 5;
    private static final int TYPE_RGB_ALPHA = 6;
    private static final int TYPE_YCBCR_SUB = 7;
    private static final int TYPE_GENERIC = 8;

    // Incidental tags (numeric TIFF tag ids not covered by TIFFImageDecoder)
    private static final int TIFF_JPEG_TABLES = 347;
    private static final int TIFF_YCBCR_SUBSAMPLING = 530;

    // Source stream positioned by getTile() at each tile/strip offset.
    SeekableStream stream;
    // Units (samples) per tile: tileWidth * tileHeight * numBands.
    int tileSize;
    // Tile grid dimensions.
    int tilesX, tilesY;
    // Per-tile (or per-strip) byte offsets and compressed byte counts.
    long[] tileOffsets;
    long[] tileByteCounts;
    // Raw ColorMap tag contents (R, G, B planes concatenated) for palette images.
    char[] colormap;
    // Bits per sample (uniform across all samples).
    int sampleSize;
    // One of the COMP_* constants above.
    int compression;
    byte[] palette;
    // Band count of the produced image; may differ from SamplesPerPixel
    // (e.g. palette expanded to 3 bands when decodePaletteAsShorts is set).
    int numBands;

    // YCbCr chroma subsampling factors (horizontal and vertical).
    int chromaSubH;
    int chromaSubV;

    // Fax compression related variables
    long tiffT4Options;
    long tiffT6Options;
    int fillOrder;

    // LZW compression related variable
    int predictor;

    // TTN2 JPEG related variables
    JPEGDecodeParam decodeParam = null;
    boolean colorConvertJPEG = false;

    // DEFLATE variables
    Inflater inflater = null;

    // Endian-ness indicator
    boolean isBigEndian;

    // One of the TYPE_* constants above.
    int imageType;
    // True for PhotometricInterpretation 0 (WhiteIsZero).
    boolean isWhiteZero = false;
    // DataBuffer.TYPE_* of the decoded samples.
    int dataType;

    boolean decodePaletteAsShorts;
    // True when the image uses TIFF tiles; false for strips (treated as tiles).
    boolean tiled;

    // Decoders
    private TIFFFaxDecoder decoder = null;
    private TIFFLZWDecoder lzwDecoder = null;
0124:
0125: /**
0126: * Decode a buffer of data into a Raster with the specified location.
0127: *
0128: * @param data buffer contain an interchange or abbreviated datastream.
0129: * @param decodeParam decoding parameters; may be null unless the
0130: * data buffer contains an abbreviated datastream in which case
0131: * it may not be null or an error will occur.
0132: * @param colorConvert whether to perform color conversion; in this
0133: * case that would be limited to YCbCr-to-RGB.
0134: * @param minX the X position of the returned Raster.
0135: * @param minY the Y position of the returned Raster.
0136: */
0137: private static final Raster decodeJPEG(byte[] data,
0138: JPEGDecodeParam decodeParam, boolean colorConvert,
0139: int minX, int minY) {
0140: // Create an InputStream from the compressed data array.
0141: ByteArrayInputStream jpegStream = new ByteArrayInputStream(data);
0142:
0143: // Create a decoder.
0144: JPEGImageDecoder decoder = decodeParam == null ? JPEGCodec
0145: .createJPEGDecoder(jpegStream) : JPEGCodec
0146: .createJPEGDecoder(jpegStream, decodeParam);
0147:
0148: // Decode the compressed data into a Raster.
0149: Raster jpegRaster;
0150: try {
0151: jpegRaster = colorConvert ? decoder.decodeAsBufferedImage()
0152: .getWritableTile(0, 0) : decoder.decodeAsRaster();
0153: } catch (IOException ioe) {
0154: throw new RuntimeException("TIFFImage13");
0155: }
0156:
0157: // Translate the decoded Raster to the specified location and return.
0158: return jpegRaster.createTranslatedChild(minX, minY);
0159: }
0160:
0161: /**
0162: * Inflates <code>deflated</code> into <code>inflated</code> using the
0163: * <code>Inflater</code> constructed during class instantiation.
0164: */
0165: private final void inflate(byte[] deflated, byte[] inflated) {
0166: inflater.setInput(deflated);
0167: try {
0168: inflater.inflate(inflated);
0169: } catch (DataFormatException dfe) {
0170: throw new RuntimeException("TIFFImage17" + ": "
0171: + dfe.getMessage());
0172: }
0173: inflater.reset();
0174: }
0175:
0176: private static SampleModel createPixelInterleavedSampleModel(
0177: int dataType, int tileWidth, int tileHeight, int bands) {
0178: int[] bandOffsets = new int[bands];
0179: for (int i = 0; i < bands; i++)
0180: bandOffsets[i] = i;
0181: return new PixelInterleavedSampleModel(dataType, tileWidth,
0182: tileHeight, bands, tileWidth * bands, bandOffsets);
0183: }
0184:
0185: /**
0186: * Return as a long[] the value of a TIFF_LONG or TIFF_SHORT field.
0187: */
0188: private final long[] getFieldAsLongs(TIFFField field) {
0189: long[] value = null;
0190:
0191: if (field.getType() == TIFFField.TIFF_SHORT) {
0192: char[] charValue = field.getAsChars();
0193: value = new long[charValue.length];
0194: for (int i = 0; i < charValue.length; i++) {
0195: value[i] = charValue[i] & 0xffff;
0196: }
0197: } else if (field.getType() == TIFFField.TIFF_LONG) {
0198: value = field.getAsLongs();
0199: } else {
0200: throw new RuntimeException();
0201: }
0202:
0203: return value;
0204: }
0205:
0206: /**
0207: * Constructs a TIFFImage that acquires its data from a given
0208: * SeekableStream and reads from a particular IFD of the stream.
0209: * The index of the first IFD is 0.
0210: *
0211: * @param stream the SeekableStream to read from.
0212: * @param param an instance of TIFFDecodeParam, or null.
0213: * @param directory the index of the IFD to read from.
0214: */
0215: public TIFFImage(SeekableStream stream, TIFFDecodeParam param,
0216: int directory) throws IOException {
0217:
0218: this .stream = stream;
0219: if (param == null) {
0220: param = new TIFFDecodeParam();
0221: }
0222:
0223: decodePaletteAsShorts = param.getDecodePaletteAsShorts();
0224:
0225: // Read the specified directory.
0226: TIFFDirectory dir = param.getIFDOffset() == null ? new TIFFDirectory(
0227: stream, directory)
0228: : new TIFFDirectory(stream, param.getIFDOffset()
0229: .longValue(), directory);
0230:
0231: // Get the number of samples per pixel
0232: TIFFField sfield = dir
0233: .getField(TIFFImageDecoder.TIFF_SAMPLES_PER_PIXEL);
0234: int samplesPerPixel = sfield == null ? 1 : (int) sfield
0235: .getAsLong(0);
0236:
0237: // Read the TIFF_PLANAR_CONFIGURATION field
0238: TIFFField planarConfigurationField = dir
0239: .getField(TIFFImageDecoder.TIFF_PLANAR_CONFIGURATION);
0240: char[] planarConfiguration = planarConfigurationField == null ? new char[] { 1 }
0241: : planarConfigurationField.getAsChars();
0242:
0243: // Support planar format (band sequential) only for 1 sample/pixel.
0244: if (planarConfiguration[0] != 1 && samplesPerPixel != 1) {
0245: throw new RuntimeException("TIFFImage0");
0246: }
0247:
0248: // Read the TIFF_BITS_PER_SAMPLE field
0249: TIFFField bitsField = dir
0250: .getField(TIFFImageDecoder.TIFF_BITS_PER_SAMPLE);
0251: char[] bitsPerSample = null;
0252: if (bitsField != null) {
0253: bitsPerSample = bitsField.getAsChars();
0254: } else {
0255: bitsPerSample = new char[] { 1 };
0256:
0257: // Ensure that all samples have the same bit depth.
0258: for (int i = 1; i < bitsPerSample.length; i++) {
0259: if (bitsPerSample[i] != bitsPerSample[0]) {
0260: throw new RuntimeException("TIFFImage1");
0261: }
0262: }
0263: }
0264: sampleSize = bitsPerSample[0];
0265:
0266: // Read the TIFF_SAMPLE_FORMAT tag to see whether the data might be
0267: // signed or floating point
0268: TIFFField sampleFormatField = dir
0269: .getField(TIFFImageDecoder.TIFF_SAMPLE_FORMAT);
0270:
0271: char[] sampleFormat = null;
0272: if (sampleFormatField != null) {
0273: sampleFormat = sampleFormatField.getAsChars();
0274:
0275: // Check that all the samples have the same format
0276: for (int l = 1; l < sampleFormat.length; l++) {
0277: if (sampleFormat[l] != sampleFormat[0]) {
0278: throw new RuntimeException("TIFFImage2");
0279: }
0280: }
0281:
0282: } else {
0283: sampleFormat = new char[] { 1 };
0284: }
0285:
0286: // Set the data type based on the sample size and format.
0287: boolean isValidDataFormat = false;
0288: switch (sampleSize) {
0289: case 1:
0290: case 4:
0291: case 8:
0292: if (sampleFormat[0] != 3) {
0293: // Ignore whether signed or unsigned: treat all as unsigned.
0294: dataType = DataBuffer.TYPE_BYTE;
0295: isValidDataFormat = true;
0296: }
0297: break;
0298: case 16:
0299: if (sampleFormat[0] != 3) {
0300: dataType = sampleFormat[0] == 2 ? DataBuffer.TYPE_SHORT
0301: : DataBuffer.TYPE_USHORT;
0302: isValidDataFormat = true;
0303: }
0304: break;
0305: case 32:
0306: if (sampleFormat[0] == 3)
0307: isValidDataFormat = false;
0308: else {
0309: dataType = DataBuffer.TYPE_INT;
0310: isValidDataFormat = true;
0311: }
0312: break;
0313: }
0314:
0315: if (!isValidDataFormat) {
0316: throw new RuntimeException("TIFFImage3");
0317: }
0318:
0319: // Figure out what compression if any, is being used.
0320: TIFFField compField = dir
0321: .getField(TIFFImageDecoder.TIFF_COMPRESSION);
0322: compression = compField == null ? COMP_NONE : compField
0323: .getAsInt(0);
0324:
0325: // Get the photometric interpretation.
0326: int photometricType = (int) dir
0327: .getFieldAsLong(TIFFImageDecoder.TIFF_PHOTOMETRIC_INTERPRETATION);
0328:
0329: // Determine which kind of image we are dealing with.
0330: imageType = TYPE_UNSUPPORTED;
0331: switch (photometricType) {
0332: case 0: // WhiteIsZero
0333: isWhiteZero = true;
0334: case 1: // BlackIsZero
0335: if (sampleSize == 1 && samplesPerPixel == 1) {
0336: imageType = TYPE_BILEVEL;
0337: } else if (sampleSize == 4 && samplesPerPixel == 1) {
0338: imageType = TYPE_GRAY_4BIT;
0339: } else if (sampleSize % 8 == 0) {
0340: if (samplesPerPixel == 1) {
0341: imageType = TYPE_GRAY;
0342: } else if (samplesPerPixel == 2) {
0343: imageType = TYPE_GRAY_ALPHA;
0344: } else {
0345: imageType = TYPE_GENERIC;
0346: }
0347: }
0348: break;
0349: case 2: // RGB
0350: if (sampleSize % 8 == 0) {
0351: if (samplesPerPixel == 3) {
0352: imageType = TYPE_RGB;
0353: } else if (samplesPerPixel == 4) {
0354: imageType = TYPE_RGB_ALPHA;
0355: } else {
0356: imageType = TYPE_GENERIC;
0357: }
0358: }
0359: break;
0360: case 3: // RGB Palette
0361: if (samplesPerPixel == 1
0362: && (sampleSize == 4 || sampleSize == 8 || sampleSize == 16)) {
0363: imageType = TYPE_PALETTE;
0364: }
0365: break;
0366: case 4: // Transparency mask
0367: if (sampleSize == 1 && samplesPerPixel == 1) {
0368: imageType = TYPE_BILEVEL;
0369: }
0370: break;
0371: case 6: // YCbCr
0372: if (compression == COMP_JPEG_TTN2 && sampleSize == 8
0373: && samplesPerPixel == 3) {
0374: // Set color conversion flag.
0375: colorConvertJPEG = param.getJPEGDecompressYCbCrToRGB();
0376:
0377: // Set type to RGB if color converting.
0378: imageType = colorConvertJPEG ? TYPE_RGB : TYPE_GENERIC;
0379: } else {
0380: TIFFField chromaField = dir
0381: .getField(TIFF_YCBCR_SUBSAMPLING);
0382: if (chromaField != null) {
0383: chromaSubH = chromaField.getAsInt(0);
0384: chromaSubV = chromaField.getAsInt(1);
0385: } else {
0386: chromaSubH = chromaSubV = 2;
0387: }
0388:
0389: if (chromaSubH * chromaSubV == 1) {
0390: imageType = TYPE_GENERIC;
0391: } else if (sampleSize == 8 && samplesPerPixel == 3) {
0392: imageType = TYPE_YCBCR_SUB;
0393: }
0394: }
0395: break;
0396: default: // Other including CMYK, CIE L*a*b*, unknown.
0397: if (sampleSize % 8 == 0) {
0398: imageType = TYPE_GENERIC;
0399: }
0400: }
0401:
0402: // Bail out if not one of the supported types.
0403: if (imageType == TYPE_UNSUPPORTED) {
0404: throw new RuntimeException("TIFFImage4");
0405: }
0406:
0407: // Set basic image layout
0408: Rectangle bounds = new Rectangle(
0409: 0,
0410: 0,
0411: (int) dir
0412: .getFieldAsLong(TIFFImageDecoder.TIFF_IMAGE_WIDTH),
0413: (int) dir
0414: .getFieldAsLong(TIFFImageDecoder.TIFF_IMAGE_LENGTH));
0415:
0416: // Set a preliminary band count. This may be changed later as needed.
0417: numBands = samplesPerPixel;
0418:
0419: // Figure out if any extra samples are present.
0420: TIFFField efield = dir
0421: .getField(TIFFImageDecoder.TIFF_EXTRA_SAMPLES);
0422: int extraSamples = efield == null ? 0 : (int) efield
0423: .getAsLong(0);
0424:
0425: int tileWidth, tileHeight;
0426: if (dir.getField(TIFFImageDecoder.TIFF_TILE_OFFSETS) != null) {
0427: tiled = true;
0428: // Image is in tiled format
0429: tileWidth = (int) dir
0430: .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_WIDTH);
0431: tileHeight = (int) dir
0432: .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_LENGTH);
0433: tileOffsets = (dir
0434: .getField(TIFFImageDecoder.TIFF_TILE_OFFSETS))
0435: .getAsLongs();
0436: tileByteCounts = getFieldAsLongs(dir
0437: .getField(TIFFImageDecoder.TIFF_TILE_BYTE_COUNTS));
0438:
0439: } else {
0440: tiled = false;
0441:
0442: // Image is in stripped format, looks like tiles to us
0443: // Note: Some legacy files may have tile width and height
0444: // written but use the strip offsets and byte counts fields
0445: // instead of the tile offsets and byte counts. Therefore
0446: // we default here to the tile dimensions if they are written.
0447: tileWidth = dir.getField(TIFFImageDecoder.TIFF_TILE_WIDTH) != null ? (int) dir
0448: .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_WIDTH)
0449: : bounds.width;
0450: TIFFField field = dir
0451: .getField(TIFFImageDecoder.TIFF_ROWS_PER_STRIP);
0452: if (field == null) {
0453: // Default is infinity (2^32 -1), basically the entire image
0454:
0455: tileHeight = dir
0456: .getField(TIFFImageDecoder.TIFF_TILE_LENGTH) != null ? (int) dir
0457: .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_LENGTH)
0458: : bounds.height;
0459: } else {
0460: long l = field.getAsLong(0);
0461: long infinity = 1;
0462: infinity = (infinity << 32) - 1;
0463: if (l == infinity) {
0464: // 2^32 - 1 (effectively infinity, entire image is 1 strip)
0465: tileHeight = bounds.height;
0466: } else {
0467: tileHeight = (int) l;
0468: }
0469: }
0470:
0471: TIFFField tileOffsetsField = dir
0472: .getField(TIFFImageDecoder.TIFF_STRIP_OFFSETS);
0473: if (tileOffsetsField == null) {
0474: throw new RuntimeException("TIFFImage5");
0475: } else {
0476: tileOffsets = getFieldAsLongs(tileOffsetsField);
0477: }
0478:
0479: TIFFField tileByteCountsField = dir
0480: .getField(TIFFImageDecoder.TIFF_STRIP_BYTE_COUNTS);
0481: if (tileByteCountsField == null) {
0482: throw new RuntimeException("TIFFImage6");
0483: } else {
0484: tileByteCounts = getFieldAsLongs(tileByteCountsField);
0485: }
0486: }
0487:
0488: // Calculate number of tiles and the tileSize in bytes
0489: tilesX = (bounds.width + tileWidth - 1) / tileWidth;
0490: tilesY = (bounds.height + tileHeight - 1) / tileHeight;
0491: tileSize = tileWidth * tileHeight * numBands;
0492:
0493: // Check whether big endian or little endian format is used.
0494: isBigEndian = dir.isBigEndian();
0495:
0496: TIFFField fillOrderField = dir
0497: .getField(TIFFImageDecoder.TIFF_FILL_ORDER);
0498: if (fillOrderField != null) {
0499: fillOrder = fillOrderField.getAsInt(0);
0500: } else {
0501: // Default Fill Order
0502: fillOrder = 1;
0503: }
0504:
0505: switch (compression) {
0506: case COMP_NONE:
0507: case COMP_PACKBITS:
0508: // Do nothing.
0509: break;
0510: case COMP_DEFLATE:
0511: inflater = new Inflater();
0512: break;
0513: case COMP_FAX_G3_1D:
0514: case COMP_FAX_G3_2D:
0515: case COMP_FAX_G4_2D:
0516: if (sampleSize != 1) {
0517: throw new RuntimeException("TIFFImage7");
0518: }
0519:
0520: // Fax T.4 compression options
0521: if (compression == 3) {
0522: TIFFField t4OptionsField = dir
0523: .getField(TIFFImageDecoder.TIFF_T4_OPTIONS);
0524: if (t4OptionsField != null) {
0525: tiffT4Options = t4OptionsField.getAsLong(0);
0526: } else {
0527: // Use default value
0528: tiffT4Options = 0;
0529: }
0530: }
0531:
0532: // Fax T.6 compression options
0533: if (compression == 4) {
0534: TIFFField t6OptionsField = dir
0535: .getField(TIFFImageDecoder.TIFF_T6_OPTIONS);
0536: if (t6OptionsField != null) {
0537: tiffT6Options = t6OptionsField.getAsLong(0);
0538: } else {
0539: // Use default value
0540: tiffT6Options = 0;
0541: }
0542: }
0543:
0544: // Fax encoding, need to create the Fax decoder.
0545: decoder = new TIFFFaxDecoder(fillOrder, tileWidth,
0546: tileHeight);
0547: break;
0548:
0549: case COMP_LZW:
0550: // LZW compression used, need to create the LZW decoder.
0551: TIFFField predictorField = dir
0552: .getField(TIFFImageDecoder.TIFF_PREDICTOR);
0553:
0554: if (predictorField == null) {
0555: predictor = 1;
0556: } else {
0557: predictor = predictorField.getAsInt(0);
0558:
0559: if (predictor != 1 && predictor != 2) {
0560: throw new RuntimeException("TIFFImage8");
0561: }
0562:
0563: if (predictor == 2 && sampleSize != 8) {
0564: throw new RuntimeException(sampleSize
0565: + "TIFFImage9");
0566: }
0567: }
0568:
0569: lzwDecoder = new TIFFLZWDecoder(tileWidth, predictor,
0570: samplesPerPixel);
0571: break;
0572:
0573: case COMP_JPEG_OLD:
0574: throw new RuntimeException("TIFFImage15");
0575:
0576: case COMP_JPEG_TTN2:
0577: if (!(sampleSize == 8 && ((imageType == TYPE_GRAY && samplesPerPixel == 1)
0578: || (imageType == TYPE_PALETTE && samplesPerPixel == 1) || (imageType == TYPE_RGB && samplesPerPixel == 3)))) {
0579: throw new RuntimeException("TIFFImage16");
0580: }
0581:
0582: // Create decodeParam from JPEGTables field if present.
0583: if (dir.isTagPresent(TIFF_JPEG_TABLES)) {
0584: TIFFField jpegTableField = dir
0585: .getField(TIFF_JPEG_TABLES);
0586: byte[] jpegTable = jpegTableField.getAsBytes();
0587: ByteArrayInputStream tableStream = new ByteArrayInputStream(
0588: jpegTable);
0589: JPEGImageDecoder decoder = JPEGCodec
0590: .createJPEGDecoder(tableStream);
0591: decoder.decodeAsRaster();
0592: decodeParam = decoder.getJPEGDecodeParam();
0593: }
0594:
0595: break;
0596: default:
0597: throw new RuntimeException("TIFFImage10");
0598: }
0599:
0600: ColorModel colorModel = null;
0601: SampleModel sampleModel = null;
0602: switch (imageType) {
0603: case TYPE_BILEVEL:
0604: case TYPE_GRAY_4BIT:
0605: sampleModel = new MultiPixelPackedSampleModel(dataType,
0606: tileWidth, tileHeight, sampleSize);
0607: if (imageType == TYPE_BILEVEL) {
0608: byte[] map = new byte[] {
0609: (byte) (isWhiteZero ? 255 : 0),
0610: (byte) (isWhiteZero ? 0 : 255) };
0611: colorModel = new IndexColorModel(1, 2, map, map, map);
0612: } else {
0613: byte[] map = new byte[16];
0614: if (isWhiteZero) {
0615: for (int i = 0; i < map.length; i++)
0616: map[i] = (byte) (255 - (16 * i));
0617: } else {
0618: for (int i = 0; i < map.length; i++)
0619: map[i] = (byte) (16 * i);
0620: }
0621: colorModel = new IndexColorModel(4, 16, map, map, map);
0622: }
0623: break;
0624:
0625: case TYPE_GRAY:
0626: case TYPE_GRAY_ALPHA:
0627: case TYPE_RGB:
0628: case TYPE_RGB_ALPHA:
0629: // Create a pixel interleaved SampleModel with decreasing
0630: // band offsets.
0631: int[] reverseOffsets = new int[numBands];
0632: for (int i = 0; i < numBands; i++) {
0633: reverseOffsets[i] = numBands - 1 - i;
0634: }
0635: sampleModel = new PixelInterleavedSampleModel(dataType,
0636: tileWidth, tileHeight, numBands, numBands
0637: * tileWidth, reverseOffsets);
0638:
0639: if (imageType == TYPE_GRAY) {
0640: colorModel = new ComponentColorModel(ColorSpace
0641: .getInstance(ColorSpace.CS_GRAY),
0642: new int[] { sampleSize }, false, false,
0643: Transparency.OPAQUE, dataType);
0644: } else if (imageType == TYPE_RGB) {
0645: colorModel = new ComponentColorModel(ColorSpace
0646: .getInstance(ColorSpace.CS_sRGB), new int[] {
0647: sampleSize, sampleSize, sampleSize }, false,
0648: false, Transparency.OPAQUE, dataType);
0649: } else { // hasAlpha
0650: // Transparency.OPAQUE signifies image data that is
0651: // completely opaque, meaning that all pixels have an alpha
0652: // value of 1.0. So the extra band gets ignored, which is
0653: // what we want.
0654: int transparency = Transparency.OPAQUE;
0655: if (extraSamples == 1) { // associated (premultiplied) alpha
0656: transparency = Transparency.TRANSLUCENT;
0657: } else if (extraSamples == 2) { // unassociated alpha
0658: transparency = Transparency.BITMASK;
0659: }
0660:
0661: colorModel = createAlphaComponentColorModel(dataType,
0662: numBands, extraSamples == 1, transparency);
0663: }
0664: break;
0665:
0666: case TYPE_GENERIC:
0667: case TYPE_YCBCR_SUB:
0668: // For this case we can't display the image, so we create a
0669: // SampleModel with increasing bandOffsets, and keep the
0670: // ColorModel as null, as there is no appropriate ColorModel.
0671:
0672: int[] bandOffsets = new int[numBands];
0673: for (int i = 0; i < numBands; i++) {
0674: bandOffsets[i] = i;
0675: }
0676:
0677: sampleModel = new PixelInterleavedSampleModel(dataType,
0678: tileWidth, tileHeight, numBands, numBands
0679: * tileWidth, bandOffsets);
0680: colorModel = null;
0681: break;
0682:
0683: case TYPE_PALETTE:
0684: // Get the colormap
0685: TIFFField cfield = dir
0686: .getField(TIFFImageDecoder.TIFF_COLORMAP);
0687: if (cfield == null) {
0688: throw new RuntimeException("TIFFImage11");
0689: } else {
0690: colormap = cfield.getAsChars();
0691: }
0692:
0693: // Could be either 1 or 3 bands depending on whether we use
0694: // IndexColorModel or not.
0695: if (decodePaletteAsShorts) {
0696: numBands = 3;
0697:
0698: // If no SampleFormat tag was specified and if the
0699: // sampleSize is less than or equal to 8, then the
0700: // dataType was initially set to byte, but now we want to
0701: // expand the palette as shorts, so the dataType should
0702: // be ushort.
0703: if (dataType == DataBuffer.TYPE_BYTE) {
0704: dataType = DataBuffer.TYPE_USHORT;
0705: }
0706:
0707: // Data will have to be unpacked into a 3 band short image
0708: // as we do not have a IndexColorModel that can deal with
0709: // a colormodel whose entries are of short data type.
0710: sampleModel = createPixelInterleavedSampleModel(
0711: dataType, tileWidth, tileHeight, numBands);
0712:
0713: colorModel = new ComponentColorModel(ColorSpace
0714: .getInstance(ColorSpace.CS_sRGB), new int[] {
0715: 16, 16, 16 }, false, false,
0716: Transparency.OPAQUE, dataType);
0717:
0718: } else {
0719:
0720: numBands = 1;
0721:
0722: if (sampleSize == 4) {
0723: // Pixel data will not be unpacked, will use
0724: // MPPSM to store packed data and
0725: // IndexColorModel to do the unpacking.
0726: sampleModel = new MultiPixelPackedSampleModel(
0727: DataBuffer.TYPE_BYTE, tileWidth,
0728: tileHeight, sampleSize);
0729: } else if (sampleSize == 8) {
0730:
0731: sampleModel = createPixelInterleavedSampleModel(
0732: DataBuffer.TYPE_BYTE, tileWidth,
0733: tileHeight, numBands);
0734: } else if (sampleSize == 16) {
0735:
0736: // Here datatype has to be unsigned since we
0737: // are storing indices into the
0738: // IndexColorModel palette. Ofcourse the
0739: // actual palette entries are allowed to be
0740: // negative.
0741: dataType = DataBuffer.TYPE_USHORT;
0742: sampleModel = createPixelInterleavedSampleModel(
0743: DataBuffer.TYPE_USHORT, tileWidth,
0744: tileHeight, numBands);
0745: }
0746:
0747: int bandLength = colormap.length / 3;
0748: byte[] r = new byte[bandLength];
0749: byte[] g = new byte[bandLength];
0750: byte[] b = new byte[bandLength];
0751:
0752: int gIndex = bandLength;
0753: int bIndex = bandLength * 2;
0754:
0755: if (dataType == DataBuffer.TYPE_SHORT) {
0756:
0757: for (int i = 0; i < bandLength; i++) {
0758: r[i] = param
0759: .decodeSigned16BitsTo8Bits((short) colormap[i]);
0760: g[i] = param
0761: .decodeSigned16BitsTo8Bits((short) colormap[gIndex
0762: + i]);
0763: b[i] = param
0764: .decodeSigned16BitsTo8Bits((short) colormap[bIndex
0765: + i]);
0766: }
0767:
0768: } else {
0769:
0770: for (int i = 0; i < bandLength; i++) {
0771: r[i] = param
0772: .decode16BitsTo8Bits(colormap[i] & 0xffff);
0773: g[i] = param
0774: .decode16BitsTo8Bits(colormap[gIndex
0775: + i] & 0xffff);
0776: b[i] = param
0777: .decode16BitsTo8Bits(colormap[bIndex
0778: + i] & 0xffff);
0779: }
0780:
0781: }
0782:
0783: colorModel = new IndexColorModel(sampleSize,
0784: bandLength, r, g, b);
0785: }
0786: break;
0787:
0788: default:
0789: throw new RuntimeException("TIFFImage4");
0790: }
0791:
0792: Map properties = new HashMap();
0793: // Set a property "tiff_directory".
0794: properties.put("tiff_directory", dir);
0795:
0796: // System.out.println("Constructed TIFF");
0797:
0798: init((CachableRed) null, bounds, colorModel, sampleModel, 0, 0,
0799: properties);
0800: }
0801:
    /**
     * Reads a private IFD from a given offset in the stream. This
     * method may be used to obtain IFDs that are referenced
     * only by private tag values.
     *
     * @param offset the absolute stream offset at which the IFD starts.
     * @return the directory parsed at the given offset (directory index 0).
     * @throws IOException if the IFD cannot be read from the stream.
     */
    public TIFFDirectory getPrivateIFD(long offset) throws IOException {
        return new TIFFDirectory(stream, offset, 0);
    }
0810:
    /**
     * Copies this image's pixel data into the supplied raster via
     * copyToRaster().
     *
     * @param wr the destination raster; written in place.
     * @return the same raster instance, for call chaining.
     */
    public WritableRaster copyData(WritableRaster wr) {
        copyToRaster(wr);
        return wr;
    }
0815:
0816: /**
0817: * Returns tile (tileX, tileY) as a Raster.
0818: */
0819: public synchronized Raster getTile(int tileX, int tileY) {
0820: if ((tileX < 0) || (tileX >= tilesX) || (tileY < 0)
0821: || (tileY >= tilesY)) {
0822: throw new IllegalArgumentException("TIFFImage12");
0823: }
0824:
0825: // System.out.println("Called TIFF getTile:" + tileX + "," + tileY);
0826:
0827: // Get the data array out of the DataBuffer
0828: byte[] bdata = null;
0829: short[] sdata = null;
0830: int[] idata = null;
0831:
0832: SampleModel sampleModel = getSampleModel();
0833: WritableRaster tile = makeTile(tileX, tileY);
0834:
0835: DataBuffer buffer = tile.getDataBuffer();
0836:
0837: int dataType = sampleModel.getDataType();
0838: if (dataType == DataBuffer.TYPE_BYTE) {
0839: bdata = ((DataBufferByte) buffer).getData();
0840: } else if (dataType == DataBuffer.TYPE_USHORT) {
0841: sdata = ((DataBufferUShort) buffer).getData();
0842: } else if (dataType == DataBuffer.TYPE_SHORT) {
0843: sdata = ((DataBufferShort) buffer).getData();
0844: } else if (dataType == DataBuffer.TYPE_INT) {
0845: idata = ((DataBufferInt) buffer).getData();
0846: }
0847:
0848: // Variables used for swapping when converting from RGB to BGR
0849: byte bswap;
0850: short sswap;
0851: int iswap;
0852:
0853: // Save original file pointer position and seek to tile data location.
0854: long save_offset = 0;
0855: try {
0856: save_offset = stream.getFilePointer();
0857: stream.seek(tileOffsets[tileY * tilesX + tileX]);
0858: } catch (IOException ioe) {
0859: throw new RuntimeException("TIFFImage13");
0860: }
0861:
0862: // Number of bytes in this tile (strip) after compression.
0863: int byteCount = (int) tileByteCounts[tileY * tilesX + tileX];
0864:
0865: // Find out the number of bytes in the current tile
0866: Rectangle newRect;
0867: if (!tiled)
0868: newRect = tile.getBounds();
0869: else
0870: newRect = new Rectangle(tile.getMinX(), tile.getMinY(),
0871: tileWidth, tileHeight);
0872:
0873: int unitsInThisTile = newRect.width * newRect.height * numBands;
0874:
0875: // Allocate read buffer if needed.
0876: byte[] data = compression != COMP_NONE
0877: || imageType == TYPE_PALETTE ? new byte[byteCount]
0878: : null;
0879:
0880: // Read the data, uncompressing as needed. There are four cases:
0881: // bilevel, palette-RGB, 4-bit grayscale, and everything else.
0882: if (imageType == TYPE_BILEVEL) { // bilevel
0883: try {
0884: if (compression == COMP_PACKBITS) {
0885: stream.readFully(data, 0, byteCount);
0886:
0887: // Since the decompressed data will still be packed
0888: // 8 pixels into 1 byte, calculate bytesInThisTile
0889: int bytesInThisTile;
0890: if ((newRect.width % 8) == 0) {
0891: bytesInThisTile = (newRect.width / 8)
0892: * newRect.height;
0893: } else {
0894: bytesInThisTile = (newRect.width / 8 + 1)
0895: * newRect.height;
0896: }
0897: decodePackbits(data, bytesInThisTile, bdata);
0898: } else if (compression == COMP_LZW) {
0899: stream.readFully(data, 0, byteCount);
0900: lzwDecoder.decode(data, bdata, newRect.height);
0901: } else if (compression == COMP_FAX_G3_1D) {
0902: stream.readFully(data, 0, byteCount);
0903: decoder.decode1D(bdata, data, 0, newRect.height);
0904: } else if (compression == COMP_FAX_G3_2D) {
0905: stream.readFully(data, 0, byteCount);
0906: decoder.decode2D(bdata, data, 0, newRect.height,
0907: tiffT4Options);
0908: } else if (compression == COMP_FAX_G4_2D) {
0909: stream.readFully(data, 0, byteCount);
0910: decoder.decodeT6(bdata, data, 0, newRect.height,
0911: tiffT6Options);
0912: } else if (compression == COMP_DEFLATE) {
0913: stream.readFully(data, 0, byteCount);
0914: inflate(data, bdata);
0915: } else if (compression == COMP_NONE) {
0916: stream.readFully(bdata, 0, byteCount);
0917: }
0918:
0919: stream.seek(save_offset);
0920: } catch (IOException ioe) {
0921: throw new RuntimeException("TIFFImage13");
0922: }
0923: } else if (imageType == TYPE_PALETTE) { // palette-RGB
0924: if (sampleSize == 16) {
0925:
0926: if (decodePaletteAsShorts) {
0927:
0928: short[] tempData = null;
0929:
0930: // At this point the data is 1 banded and will
0931: // become 3 banded only after we've done the palette
0932: // lookup, since unitsInThisTile was calculated with
0933: // 3 bands, we need to divide this by 3.
0934: int unitsBeforeLookup = unitsInThisTile / 3;
0935:
0936: // Since unitsBeforeLookup is the number of shorts,
0937: // but we do our decompression in terms of bytes, we
0938: // need to multiply it by 2 in order to figure out
0939: // how many bytes we'll get after decompression.
0940: int entries = unitsBeforeLookup * 2;
0941:
0942: // Read the data, if compressed, decode it, reset the pointer
0943: try {
0944:
0945: if (compression == COMP_PACKBITS) {
0946:
0947: stream.readFully(data, 0, byteCount);
0948:
0949: byte[] byteArray = new byte[entries];
0950: decodePackbits(data, entries, byteArray);
0951: tempData = new short[unitsBeforeLookup];
0952: interpretBytesAsShorts(byteArray, tempData,
0953: unitsBeforeLookup);
0954:
0955: } else if (compression == COMP_LZW) {
0956:
0957: // Read in all the compressed data for this tile
0958: stream.readFully(data, 0, byteCount);
0959:
0960: byte[] byteArray = new byte[entries];
0961: lzwDecoder.decode(data, byteArray,
0962: newRect.height);
0963: tempData = new short[unitsBeforeLookup];
0964: interpretBytesAsShorts(byteArray, tempData,
0965: unitsBeforeLookup);
0966:
0967: } else if (compression == COMP_DEFLATE) {
0968:
0969: stream.readFully(data, 0, byteCount);
0970: byte[] byteArray = new byte[entries];
0971: inflate(data, byteArray);
0972: tempData = new short[unitsBeforeLookup];
0973: interpretBytesAsShorts(byteArray, tempData,
0974: unitsBeforeLookup);
0975:
0976: } else if (compression == COMP_NONE) {
0977:
0978: // byteCount tells us how many bytes are there
0979: // in this tile, but we need to read in shorts,
0980: // which will take half the space, so while
0981: // allocating we divide byteCount by 2.
0982: tempData = new short[byteCount / 2];
0983: readShorts(byteCount / 2, tempData);
0984: }
0985:
0986: stream.seek(save_offset);
0987:
0988: } catch (IOException ioe) {
0989: throw new RuntimeException("TIFFImage13");
0990: }
0991:
0992: if (dataType == DataBuffer.TYPE_USHORT) {
0993:
0994: // Expand the palette image into an rgb image with ushort
0995: // data type.
0996: int cmapValue;
0997: int count = 0, lookup, len = colormap.length / 3;
0998: int len2 = len * 2;
0999: for (int i = 0; i < unitsBeforeLookup; i++) {
1000: // Get the index into the colormap
1001: lookup = tempData[i] & 0xffff;
1002: // Get the blue value
1003: cmapValue = colormap[lookup + len2];
1004: sdata[count++] = (short) (cmapValue & 0xffff);
1005: // Get the green value
1006: cmapValue = colormap[lookup + len];
1007: sdata[count++] = (short) (cmapValue & 0xffff);
1008: // Get the red value
1009: cmapValue = colormap[lookup];
1010: sdata[count++] = (short) (cmapValue & 0xffff);
1011: }
1012:
1013: } else if (dataType == DataBuffer.TYPE_SHORT) {
1014:
1015: // Expand the palette image into an rgb image with
1016: // short data type.
1017: int cmapValue;
1018: int count = 0, lookup, len = colormap.length / 3;
1019: int len2 = len * 2;
1020: for (int i = 0; i < unitsBeforeLookup; i++) {
1021: // Get the index into the colormap
1022: lookup = tempData[i] & 0xffff;
1023: // Get the blue value
1024: cmapValue = colormap[lookup + len2];
1025: sdata[count++] = (short) cmapValue;
1026: // Get the green value
1027: cmapValue = colormap[lookup + len];
1028: sdata[count++] = (short) cmapValue;
1029: // Get the red value
1030: cmapValue = colormap[lookup];
1031: sdata[count++] = (short) cmapValue;
1032: }
1033: }
1034:
1035: } else {
1036:
1037: // No lookup being done here, when RGB values are needed,
1038: // the associated IndexColorModel can be used to get them.
1039:
1040: try {
1041:
1042: if (compression == COMP_PACKBITS) {
1043:
1044: stream.readFully(data, 0, byteCount);
1045:
1046: // Since unitsInThisTile is the number of shorts,
1047: // but we do our decompression in terms of bytes, we
1048: // need to multiply unitsInThisTile by 2 in order to
1049: // figure out how many bytes we'll get after
1050: // decompression.
1051: int bytesInThisTile = unitsInThisTile * 2;
1052:
1053: byte[] byteArray = new byte[bytesInThisTile];
1054: decodePackbits(data, bytesInThisTile,
1055: byteArray);
1056: interpretBytesAsShorts(byteArray, sdata,
1057: unitsInThisTile);
1058:
1059: } else if (compression == COMP_LZW) {
1060:
1061: stream.readFully(data, 0, byteCount);
1062:
1063: // Since unitsInThisTile is the number of shorts,
1064: // but we do our decompression in terms of bytes, we
1065: // need to multiply unitsInThisTile by 2 in order to
1066: // figure out how many bytes we'll get after
1067: // decompression.
1068: byte[] byteArray = new byte[unitsInThisTile * 2];
1069: lzwDecoder.decode(data, byteArray,
1070: newRect.height);
1071: interpretBytesAsShorts(byteArray, sdata,
1072: unitsInThisTile);
1073:
1074: } else if (compression == COMP_DEFLATE) {
1075:
1076: stream.readFully(data, 0, byteCount);
1077: byte[] byteArray = new byte[unitsInThisTile * 2];
1078: inflate(data, byteArray);
1079: interpretBytesAsShorts(byteArray, sdata,
1080: unitsInThisTile);
1081:
1082: } else if (compression == COMP_NONE) {
1083:
1084: readShorts(byteCount / 2, sdata);
1085: }
1086:
1087: stream.seek(save_offset);
1088:
1089: } catch (IOException ioe) {
1090: throw new RuntimeException("TIFFImage13");
1091: }
1092: }
1093:
1094: } else if (sampleSize == 8) {
1095:
1096: if (decodePaletteAsShorts) {
1097:
1098: byte[] tempData = null;
1099:
1100: // At this point the data is 1 banded and will
1101: // become 3 banded only after we've done the palette
1102: // lookup, since unitsInThisTile was calculated with
1103: // 3 bands, we need to divide this by 3.
1104: int unitsBeforeLookup = unitsInThisTile / 3;
1105:
1106: // Read the data, if compressed, decode it, reset the pointer
1107: try {
1108:
1109: if (compression == COMP_PACKBITS) {
1110:
1111: stream.readFully(data, 0, byteCount);
1112: tempData = new byte[unitsBeforeLookup];
1113: decodePackbits(data, unitsBeforeLookup,
1114: tempData);
1115:
1116: } else if (compression == COMP_LZW) {
1117:
1118: stream.readFully(data, 0, byteCount);
1119: tempData = new byte[unitsBeforeLookup];
1120: lzwDecoder.decode(data, tempData,
1121: newRect.height);
1122:
1123: } else if (compression == COMP_JPEG_TTN2) {
1124:
1125: stream.readFully(data, 0, byteCount);
1126: Raster tempTile = decodeJPEG(data,
1127: decodeParam, colorConvertJPEG, tile
1128: .getMinX(), tile.getMinY());
1129: int[] tempPixels = new int[unitsBeforeLookup];
1130: tempTile.getPixels(tile.getMinX(), tile
1131: .getMinY(), tile.getWidth(), tile
1132: .getHeight(), tempPixels);
1133: tempData = new byte[unitsBeforeLookup];
1134: for (int i = 0; i < unitsBeforeLookup; i++) {
1135: tempData[i] = (byte) tempPixels[i];
1136: }
1137:
1138: } else if (compression == COMP_DEFLATE) {
1139:
1140: stream.readFully(data, 0, byteCount);
1141: tempData = new byte[unitsBeforeLookup];
1142: inflate(data, tempData);
1143:
1144: } else if (compression == COMP_NONE) {
1145:
1146: tempData = new byte[byteCount];
1147: stream.readFully(tempData, 0, byteCount);
1148: }
1149:
1150: stream.seek(save_offset);
1151:
1152: } catch (IOException ioe) {
1153: throw new RuntimeException("TIFFImage13");
1154: }
1155:
1156: // Expand the palette image into an rgb image with ushort
1157: // data type.
1158: int cmapValue;
1159: int count = 0, lookup, len = colormap.length / 3;
1160: int len2 = len * 2;
1161: for (int i = 0; i < unitsBeforeLookup; i++) {
1162: // Get the index into the colormap
1163: lookup = tempData[i] & 0xff;
1164: // Get the blue value
1165: cmapValue = colormap[lookup + len2];
1166: sdata[count++] = (short) (cmapValue & 0xffff);
1167: // Get the green value
1168: cmapValue = colormap[lookup + len];
1169: sdata[count++] = (short) (cmapValue & 0xffff);
1170: // Get the red value
1171: cmapValue = colormap[lookup];
1172: sdata[count++] = (short) (cmapValue & 0xffff);
1173: }
1174: } else {
1175:
1176: // No lookup being done here, when RGB values are needed,
1177: // the associated IndexColorModel can be used to get them.
1178:
1179: try {
1180:
1181: if (compression == COMP_PACKBITS) {
1182:
1183: stream.readFully(data, 0, byteCount);
1184: decodePackbits(data, unitsInThisTile, bdata);
1185:
1186: } else if (compression == COMP_LZW) {
1187:
1188: stream.readFully(data, 0, byteCount);
1189: lzwDecoder.decode(data, bdata,
1190: newRect.height);
1191:
1192: } else if (compression == COMP_JPEG_TTN2) {
1193:
1194: stream.readFully(data, 0, byteCount);
1195: tile.setRect(decodeJPEG(data, decodeParam,
1196: colorConvertJPEG, tile.getMinX(),
1197: tile.getMinY()));
1198:
1199: } else if (compression == COMP_DEFLATE) {
1200:
1201: stream.readFully(data, 0, byteCount);
1202: inflate(data, bdata);
1203:
1204: } else if (compression == COMP_NONE) {
1205:
1206: stream.readFully(bdata, 0, byteCount);
1207: }
1208:
1209: stream.seek(save_offset);
1210:
1211: } catch (IOException ioe) {
1212: throw new RuntimeException("TIFFImage13");
1213: }
1214: }
1215:
1216: } else if (sampleSize == 4) {
1217:
1218: int padding = (newRect.width % 2 == 0) ? 0 : 1;
1219: int bytesPostDecoding = ((newRect.width / 2 + padding) * newRect.height);
1220:
1221: // Output short images
1222: if (decodePaletteAsShorts) {
1223:
1224: byte[] tempData = null;
1225:
1226: try {
1227: stream.readFully(data, 0, byteCount);
1228: stream.seek(save_offset);
1229: } catch (IOException ioe) {
1230: throw new RuntimeException("TIFFImage13");
1231: }
1232:
1233: // If compressed, decode the data.
1234: if (compression == COMP_PACKBITS) {
1235:
1236: tempData = new byte[bytesPostDecoding];
1237: decodePackbits(data, bytesPostDecoding,
1238: tempData);
1239:
1240: } else if (compression == COMP_LZW) {
1241:
1242: tempData = new byte[bytesPostDecoding];
1243: lzwDecoder.decode(data, tempData,
1244: newRect.height);
1245:
1246: } else if (compression == COMP_DEFLATE) {
1247:
1248: tempData = new byte[bytesPostDecoding];
1249: inflate(data, tempData);
1250:
1251: } else if (compression == COMP_NONE) {
1252:
1253: tempData = data;
1254: }
1255:
1256: int bytes = unitsInThisTile / 3;
1257:
1258: // Unpack the 2 pixels packed into each byte.
1259: data = new byte[bytes];
1260:
1261: int srcCount = 0, dstCount = 0;
1262: for (int j = 0; j < newRect.height; j++) {
1263: for (int i = 0; i < newRect.width / 2; i++) {
1264: data[dstCount++] = (byte) ((tempData[srcCount] & 0xf0) >> 4);
1265: data[dstCount++] = (byte) (tempData[srcCount++] & 0x0f);
1266: }
1267:
1268: if (padding == 1) {
1269: data[dstCount++] = (byte) ((tempData[srcCount++] & 0xf0) >> 4);
1270: }
1271: }
1272:
1273: int len = colormap.length / 3;
1274: int len2 = len * 2;
1275: int cmapValue, lookup;
1276: int count = 0;
1277: for (int i = 0; i < bytes; i++) {
1278: lookup = data[i] & 0xff;
1279: cmapValue = colormap[lookup + len2];
1280: sdata[count++] = (short) (cmapValue & 0xffff);
1281: cmapValue = colormap[lookup + len];
1282: sdata[count++] = (short) (cmapValue & 0xffff);
1283: cmapValue = colormap[lookup];
1284: sdata[count++] = (short) (cmapValue & 0xffff);
1285: }
1286: } else {
1287:
1288: // Output byte values, use IndexColorModel for unpacking
1289: try {
1290:
1291: // If compressed, decode the data.
1292: if (compression == COMP_PACKBITS) {
1293:
1294: stream.readFully(data, 0, byteCount);
1295: decodePackbits(data, bytesPostDecoding,
1296: bdata);
1297:
1298: } else if (compression == COMP_LZW) {
1299:
1300: stream.readFully(data, 0, byteCount);
1301: lzwDecoder.decode(data, bdata,
1302: newRect.height);
1303:
1304: } else if (compression == COMP_DEFLATE) {
1305:
1306: stream.readFully(data, 0, byteCount);
1307: inflate(data, bdata);
1308:
1309: } else if (compression == COMP_NONE) {
1310:
1311: stream.readFully(bdata, 0, byteCount);
1312: }
1313:
1314: stream.seek(save_offset);
1315:
1316: } catch (IOException ioe) {
1317: throw new RuntimeException("TIFFImage13");
1318: }
1319: }
1320: }
1321: } else if (imageType == TYPE_GRAY_4BIT) { // 4-bit gray
1322: try {
1323: if (compression == COMP_PACKBITS) {
1324:
1325: stream.readFully(data, 0, byteCount);
1326:
1327: // Since the decompressed data will still be packed
1328: // 2 pixels into 1 byte, calculate bytesInThisTile
1329: int bytesInThisTile;
1330: if ((newRect.width % 8) == 0) {
1331: bytesInThisTile = (newRect.width / 2)
1332: * newRect.height;
1333: } else {
1334: bytesInThisTile = (newRect.width / 2 + 1)
1335: * newRect.height;
1336: }
1337:
1338: decodePackbits(data, bytesInThisTile, bdata);
1339:
1340: } else if (compression == COMP_LZW) {
1341:
1342: stream.readFully(data, 0, byteCount);
1343: lzwDecoder.decode(data, bdata, newRect.height);
1344:
1345: } else if (compression == COMP_DEFLATE) {
1346:
1347: stream.readFully(data, 0, byteCount);
1348: inflate(data, bdata);
1349:
1350: } else {
1351:
1352: stream.readFully(bdata, 0, byteCount);
1353: }
1354:
1355: stream.seek(save_offset);
1356: } catch (IOException ioe) {
1357: throw new RuntimeException("TIFFImage13");
1358: }
1359: } else { // everything else
1360: try {
1361:
1362: if (sampleSize == 8) {
1363:
1364: if (compression == COMP_NONE) {
1365: stream.readFully(bdata, 0, byteCount);
1366:
1367: } else if (compression == COMP_LZW) {
1368:
1369: stream.readFully(data, 0, byteCount);
1370: lzwDecoder.decode(data, bdata, newRect.height);
1371:
1372: } else if (compression == COMP_PACKBITS) {
1373:
1374: stream.readFully(data, 0, byteCount);
1375: decodePackbits(data, unitsInThisTile, bdata);
1376:
1377: } else if (compression == COMP_JPEG_TTN2) {
1378:
1379: stream.readFully(data, 0, byteCount);
1380: tile.setRect(decodeJPEG(data, decodeParam,
1381: colorConvertJPEG, tile.getMinX(), tile
1382: .getMinY()));
1383: } else if (compression == COMP_DEFLATE) {
1384:
1385: stream.readFully(data, 0, byteCount);
1386: inflate(data, bdata);
1387: }
1388:
1389: } else if (sampleSize == 16) {
1390:
1391: if (compression == COMP_NONE) {
1392:
1393: readShorts(byteCount / 2, sdata);
1394:
1395: } else if (compression == COMP_LZW) {
1396:
1397: stream.readFully(data, 0, byteCount);
1398:
1399: // Since unitsInThisTile is the number of shorts,
1400: // but we do our decompression in terms of bytes, we
1401: // need to multiply unitsInThisTile by 2 in order to
1402: // figure out how many bytes we'll get after
1403: // decompression.
1404: byte[] byteArray = new byte[unitsInThisTile * 2];
1405: lzwDecoder.decode(data, byteArray,
1406: newRect.height);
1407: interpretBytesAsShorts(byteArray, sdata,
1408: unitsInThisTile);
1409:
1410: } else if (compression == COMP_PACKBITS) {
1411:
1412: stream.readFully(data, 0, byteCount);
1413:
1414: // Since unitsInThisTile is the number of shorts,
1415: // but we do our decompression in terms of bytes, we
1416: // need to multiply unitsInThisTile by 2 in order to
1417: // figure out how many bytes we'll get after
1418: // decompression.
1419: int bytesInThisTile = unitsInThisTile * 2;
1420:
1421: byte[] byteArray = new byte[bytesInThisTile];
1422: decodePackbits(data, bytesInThisTile, byteArray);
1423: interpretBytesAsShorts(byteArray, sdata,
1424: unitsInThisTile);
1425: } else if (compression == COMP_DEFLATE) {
1426:
1427: stream.readFully(data, 0, byteCount);
1428: byte[] byteArray = new byte[unitsInThisTile * 2];
1429: inflate(data, byteArray);
1430: interpretBytesAsShorts(byteArray, sdata,
1431: unitsInThisTile);
1432:
1433: }
1434: } else if (sampleSize == 32
1435: && dataType == DataBuffer.TYPE_INT) { // redundant
1436: if (compression == COMP_NONE) {
1437:
1438: readInts(byteCount / 4, idata);
1439:
1440: } else if (compression == COMP_LZW) {
1441:
1442: stream.readFully(data, 0, byteCount);
1443:
1444: // Since unitsInThisTile is the number of ints,
1445: // but we do our decompression in terms of bytes, we
1446: // need to multiply unitsInThisTile by 4 in order to
1447: // figure out how many bytes we'll get after
1448: // decompression.
1449: byte[] byteArray = new byte[unitsInThisTile * 4];
1450: lzwDecoder.decode(data, byteArray,
1451: newRect.height);
1452: interpretBytesAsInts(byteArray, idata,
1453: unitsInThisTile);
1454:
1455: } else if (compression == COMP_PACKBITS) {
1456:
1457: stream.readFully(data, 0, byteCount);
1458:
1459: // Since unitsInThisTile is the number of ints,
1460: // but we do our decompression in terms of bytes, we
1461: // need to multiply unitsInThisTile by 4 in order to
1462: // figure out how many bytes we'll get after
1463: // decompression.
1464: int bytesInThisTile = unitsInThisTile * 4;
1465:
1466: byte[] byteArray = new byte[bytesInThisTile];
1467: decodePackbits(data, bytesInThisTile, byteArray);
1468: interpretBytesAsInts(byteArray, idata,
1469: unitsInThisTile);
1470: } else if (compression == COMP_DEFLATE) {
1471:
1472: stream.readFully(data, 0, byteCount);
1473: byte[] byteArray = new byte[unitsInThisTile * 4];
1474: inflate(data, byteArray);
1475: interpretBytesAsInts(byteArray, idata,
1476: unitsInThisTile);
1477:
1478: }
1479: }
1480:
1481: stream.seek(save_offset);
1482:
1483: } catch (IOException ioe) {
1484: throw new RuntimeException("TIFFImage13");
1485: }
1486:
1487: // Modify the data for certain special cases.
1488: switch (imageType) {
1489: case TYPE_GRAY:
1490: case TYPE_GRAY_ALPHA:
1491: if (isWhiteZero) {
1492: // Since we are using a ComponentColorModel with this
1493: // image, we need to change the WhiteIsZero data to
1494: // BlackIsZero data so it will display properly.
1495: if (dataType == DataBuffer.TYPE_BYTE
1496: && !(getColorModel() instanceof IndexColorModel)) {
1497:
1498: for (int l = 0; l < bdata.length; l += numBands) {
1499: bdata[l] = (byte) (255 - bdata[l]);
1500: }
1501: } else if (dataType == DataBuffer.TYPE_USHORT) {
1502:
1503: int ushortMax = Short.MAX_VALUE
1504: - Short.MIN_VALUE;
1505: for (int l = 0; l < sdata.length; l += numBands) {
1506: sdata[l] = (short) (ushortMax - sdata[l]);
1507: }
1508:
1509: } else if (dataType == DataBuffer.TYPE_SHORT) {
1510:
1511: for (int l = 0; l < sdata.length; l += numBands) {
1512: sdata[l] = (short) (~sdata[l]);
1513: }
1514: } else if (dataType == DataBuffer.TYPE_INT) {
1515:
1516: long uintMax = ((long) Integer.MAX_VALUE - (long) Integer.MIN_VALUE);
1517: for (int l = 0; l < idata.length; l += numBands) {
1518: idata[l] = (int) (uintMax - idata[l]);
1519: }
1520: }
1521: }
1522: break;
1523: case TYPE_RGB:
1524: // Change RGB to BGR order, as Java2D displays that faster.
1525: // Unnecessary for JPEG-in-TIFF as the decoder handles it.
1526: if (sampleSize == 8 && compression != COMP_JPEG_TTN2) {
1527: for (int i = 0; i < unitsInThisTile; i += 3) {
1528: bswap = bdata[i];
1529: bdata[i] = bdata[i + 2];
1530: bdata[i + 2] = bswap;
1531: }
1532: } else if (sampleSize == 16) {
1533: for (int i = 0; i < unitsInThisTile; i += 3) {
1534: sswap = sdata[i];
1535: sdata[i] = sdata[i + 2];
1536: sdata[i + 2] = sswap;
1537: }
1538: } else if (sampleSize == 32) {
1539: if (dataType == DataBuffer.TYPE_INT) {
1540: for (int i = 0; i < unitsInThisTile; i += 3) {
1541: iswap = idata[i];
1542: idata[i] = idata[i + 2];
1543: idata[i + 2] = iswap;
1544: }
1545: }
1546: }
1547: break;
1548: case TYPE_RGB_ALPHA:
1549: // Convert from RGBA to ABGR for Java2D
1550: if (sampleSize == 8) {
1551: for (int i = 0; i < unitsInThisTile; i += 4) {
1552: // Swap R and A
1553: bswap = bdata[i];
1554: bdata[i] = bdata[i + 3];
1555: bdata[i + 3] = bswap;
1556:
1557: // Swap G and B
1558: bswap = bdata[i + 1];
1559: bdata[i + 1] = bdata[i + 2];
1560: bdata[i + 2] = bswap;
1561: }
1562: } else if (sampleSize == 16) {
1563: for (int i = 0; i < unitsInThisTile; i += 4) {
1564: // Swap R and A
1565: sswap = sdata[i];
1566: sdata[i] = sdata[i + 3];
1567: sdata[i + 3] = sswap;
1568:
1569: // Swap G and B
1570: sswap = sdata[i + 1];
1571: sdata[i + 1] = sdata[i + 2];
1572: sdata[i + 2] = sswap;
1573: }
1574: } else if (sampleSize == 32) {
1575: if (dataType == DataBuffer.TYPE_INT) {
1576: for (int i = 0; i < unitsInThisTile; i += 4) {
1577: // Swap R and A
1578: iswap = idata[i];
1579: idata[i] = idata[i + 3];
1580: idata[i + 3] = iswap;
1581:
1582: // Swap G and B
1583: iswap = idata[i + 1];
1584: idata[i + 1] = idata[i + 2];
1585: idata[i + 2] = iswap;
1586: }
1587: }
1588: }
1589: break;
1590: case TYPE_YCBCR_SUB:
1591: // Post-processing for YCbCr with subsampled chrominance:
1592: // simply replicate the chroma channels for displayability.
1593: int pixelsPerDataUnit = chromaSubH * chromaSubV;
1594:
1595: int numH = newRect.width / chromaSubH;
1596: int numV = newRect.height / chromaSubV;
1597:
1598: byte[] tempData = new byte[numH * numV
1599: * (pixelsPerDataUnit + 2)];
1600: System
1601: .arraycopy(bdata, 0, tempData, 0,
1602: tempData.length);
1603:
1604: int samplesPerDataUnit = pixelsPerDataUnit * 3;
1605: int[] pixels = new int[samplesPerDataUnit];
1606:
1607: int bOffset = 0;
1608: int offsetCb = pixelsPerDataUnit;
1609: int offsetCr = offsetCb + 1;
1610:
1611: int y = newRect.y;
1612: for (int j = 0; j < numV; j++) {
1613: int x = newRect.x;
1614: for (int i = 0; i < numH; i++) {
1615: int Cb = tempData[bOffset + offsetCb];
1616: int Cr = tempData[bOffset + offsetCr];
1617: int k = 0;
1618: while (k < samplesPerDataUnit) {
1619: pixels[k++] = tempData[bOffset++];
1620: pixels[k++] = Cb;
1621: pixels[k++] = Cr;
1622: }
1623: bOffset += 2;
1624: tile.setPixels(x, y, chromaSubH, chromaSubV,
1625: pixels);
1626: x += chromaSubH;
1627: }
1628: y += chromaSubV;
1629: }
1630:
1631: break;
1632: }
1633: }
1634:
1635: return tile;
1636: }
1637:
1638: private void readShorts(int shortCount, short[] shortArray) {
1639:
1640: // Since each short consists of 2 bytes, we need a
1641: // byte array of double size
1642: int byteCount = 2 * shortCount;
1643: byte[] byteArray = new byte[byteCount];
1644:
1645: try {
1646: stream.readFully(byteArray, 0, byteCount);
1647: } catch (IOException ioe) {
1648: throw new RuntimeException("TIFFImage13");
1649: }
1650:
1651: interpretBytesAsShorts(byteArray, shortArray, shortCount);
1652: }
1653:
1654: private void readInts(int intCount, int[] intArray) {
1655:
1656: // Since each int consists of 4 bytes, we need a
1657: // byte array of quadruple size
1658: int byteCount = 4 * intCount;
1659: byte[] byteArray = new byte[byteCount];
1660:
1661: try {
1662: stream.readFully(byteArray, 0, byteCount);
1663: } catch (IOException ioe) {
1664: throw new RuntimeException("TIFFImage13");
1665: }
1666:
1667: interpretBytesAsInts(byteArray, intArray, intCount);
1668: }
1669:
1670: // Method to interpret a byte array to a short array, depending on
1671: // whether the bytes are stored in a big endian or little endian format.
1672: private void interpretBytesAsShorts(byte[] byteArray,
1673: short[] shortArray, int shortCount) {
1674:
1675: int j = 0;
1676: int firstByte, secondByte;
1677:
1678: if (isBigEndian) {
1679:
1680: for (int i = 0; i < shortCount; i++) {
1681: firstByte = byteArray[j++] & 0xff;
1682: secondByte = byteArray[j++] & 0xff;
1683: shortArray[i] = (short) ((firstByte << 8) + secondByte);
1684: }
1685:
1686: } else {
1687:
1688: for (int i = 0; i < shortCount; i++) {
1689: firstByte = byteArray[j++] & 0xff;
1690: secondByte = byteArray[j++] & 0xff;
1691: shortArray[i] = (short) ((secondByte << 8) + firstByte);
1692: }
1693: }
1694: }
1695:
1696: // Method to interpret a byte array to a int array, depending on
1697: // whether the bytes are stored in a big endian or little endian format.
1698: private void interpretBytesAsInts(byte[] byteArray, int[] intArray,
1699: int intCount) {
1700:
1701: int j = 0;
1702:
1703: if (isBigEndian) {
1704:
1705: for (int i = 0; i < intCount; i++) {
1706: intArray[i] = (((byteArray[j++] & 0xff) << 24)
1707: | ((byteArray[j++] & 0xff) << 16)
1708: | ((byteArray[j++] & 0xff) << 8) | (byteArray[j++] & 0xff));
1709: }
1710:
1711: } else {
1712:
1713: for (int i = 0; i < intCount; i++) {
1714: intArray[i] = ((byteArray[j++] & 0xff)
1715: | ((byteArray[j++] & 0xff) << 8)
1716: | ((byteArray[j++] & 0xff) << 16) | ((byteArray[j++] & 0xff) << 24));
1717: }
1718: }
1719: }
1720:
1721: // Uncompress packbits compressed image data.
1722: private byte[] decodePackbits(byte[] data, int arraySize, byte[] dst) {
1723:
1724: if (dst == null) {
1725: dst = new byte[arraySize];
1726: }
1727:
1728: int srcCount = 0, dstCount = 0;
1729: byte repeat, b;
1730:
1731: try {
1732:
1733: while (dstCount < arraySize) {
1734:
1735: b = data[srcCount++];
1736:
1737: if (b >= 0 && b <= 127) {
1738:
1739: // literal run packet
1740: for (int i = 0; i < (b + 1); i++) {
1741: dst[dstCount++] = data[srcCount++];
1742: }
1743:
1744: } else if (b <= -1 && b >= -127) {
1745:
1746: // 2 byte encoded run packet
1747: repeat = data[srcCount++];
1748: for (int i = 0; i < (-b + 1); i++) {
1749: dst[dstCount++] = repeat;
1750: }
1751:
1752: } else {
1753: // no-op packet. Do nothing
1754: srcCount++;
1755: }
1756: }
1757: } catch (java.lang.ArrayIndexOutOfBoundsException ae) {
1758: throw new RuntimeException("TIFFImage14");
1759: }
1760:
1761: return dst;
1762: }
1763:
1764: // Need a createColorModel().
1765: // Create ComponentColorModel for TYPE_RGB images
1766: private ComponentColorModel createAlphaComponentColorModel(
1767: int dataType, int numBands, boolean isAlphaPremultiplied,
1768: int transparency) {
1769:
1770: ComponentColorModel ccm = null;
1771: int[] RGBBits = null;
1772: ColorSpace cs = null;
1773: switch (numBands) {
1774: case 2: // gray+alpha
1775: cs = ColorSpace.getInstance(ColorSpace.CS_GRAY);
1776: break;
1777: case 4: // RGB+alpha
1778: cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
1779: break;
1780: default:
1781: throw new IllegalArgumentException();
1782: }
1783:
1784: int componentSize = 0;
1785: switch (dataType) {
1786: case DataBuffer.TYPE_BYTE:
1787: componentSize = 8;
1788: break;
1789: case DataBuffer.TYPE_USHORT:
1790: case DataBuffer.TYPE_SHORT:
1791: componentSize = 16;
1792: break;
1793: case DataBuffer.TYPE_INT:
1794: componentSize = 32;
1795: break;
1796: default:
1797: throw new IllegalArgumentException();
1798: }
1799:
1800: RGBBits = new int[numBands];
1801: for (int i = 0; i < numBands; i++) {
1802: RGBBits[i] = componentSize;
1803: }
1804:
1805: ccm = new ComponentColorModel(cs, RGBBits, true,
1806: isAlphaPremultiplied, transparency, dataType);
1807:
1808: return ccm;
1809: }
1810:
1811: }
|