Source Code Cross Referenced for TIFFImage.java — 6.0 JDK Modules » Java Advanced Imaging » com.sun.media.jai.codecimpl



0001:        /*
0002:         * $RCSfile: TIFFImage.java,v $
0003:         *
0004:         * Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
0005:         *
0006:         * Use is subject to license terms.
0007:         *
0008:         * $Revision: 1.6 $
0009:         * $Date: 2006/02/17 17:59:15 $
0010:         * $State: Exp $
0011:         */
0012:        package com.sun.media.jai.codecimpl;
0013:
0014:        import java.awt.Point;
0015:        import java.awt.Rectangle;
0016:        import java.awt.RenderingHints;
0017:        import java.awt.Transparency;
0018:        import java.awt.color.ColorSpace;
0019:        import java.awt.image.ColorModel;
0020:        import java.awt.image.DataBuffer;
0021:        import java.awt.image.DataBufferByte;
0022:        import java.awt.image.DataBufferShort;
0023:        import java.awt.image.DataBufferUShort;
0024:        import java.awt.image.DataBufferInt;
0025:        import java.awt.image.Raster;
0026:        import java.awt.image.WritableRaster;
0027:        import java.awt.image.RenderedImage;
0028:        import java.awt.image.SampleModel;
0029:        import java.awt.image.IndexColorModel;
0030:        import java.awt.image.MultiPixelPackedSampleModel;
0031:        import java.awt.image.PixelInterleavedSampleModel;
0032:        import java.awt.image.ComponentColorModel;
0033:        import java.io.ByteArrayInputStream;
0034:        import java.io.File;
0035:        import java.io.InputStream;
0036:        import java.io.IOException;
0037:        import java.lang.reflect.Constructor;
0038:        import java.lang.reflect.Method;
0039:        import java.text.MessageFormat;
0040:        import java.util.Locale;
0041:        import java.util.zip.DataFormatException;
0042:        import java.util.zip.Inflater;
0043:        import com.sun.media.jai.codec.ImageCodec;
0044:        import com.sun.media.jai.codec.ImageDecoder;
0045:        import com.sun.media.jai.codec.ImageDecoderImpl;
0046:        import com.sun.media.jai.codec.ImageDecodeParam;
0047:        import com.sun.media.jai.codec.SeekableStream;
0048:        import com.sun.media.jai.codec.TIFFDecodeParam;
0049:        import com.sun.media.jai.codec.TIFFDirectory;
0050:        import com.sun.media.jai.codec.TIFFField;
0051:        import com.sun.media.jai.codecimpl.util.DataBufferFloat;
0052:        import com.sun.media.jai.codecimpl.util.FloatDoubleColorModel;
0053:        import com.sun.media.jai.codecimpl.util.RasterFactory;
0054:        import com.sun.image.codec.jpeg.JPEGCodec;
0055:        import com.sun.image.codec.jpeg.JPEGDecodeParam;
0056:        import com.sun.image.codec.jpeg.JPEGImageDecoder;
0057:        import com.sun.media.jai.codecimpl.ImagingListenerProxy;
0058:        import com.sun.media.jai.codecimpl.util.ImagingException;
0059:        import com.sun.media.jai.util.SimpleCMYKColorSpace;
0060:
0061:        public class TIFFImage extends SimpleRenderedImage {
0062:
0063:            // Compression types
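            // These mirror the values of the TIFF Compression tag (259):
            // 32773 is PackBits and 32946 is the older Deflate code that
            // predates the standard "Adobe Deflate" value of 8.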
0064:            public static final int COMP_NONE = 1;
0065:            public static final int COMP_FAX_G3_1D = 2;
0066:            public static final int COMP_FAX_G3_2D = 3;
0067:            public static final int COMP_FAX_G4_2D = 4;
0068:            public static final int COMP_LZW = 5;
0069:            public static final int COMP_JPEG_OLD = 6;
0070:            public static final int COMP_JPEG_TTN2 = 7;
0071:            public static final int COMP_PACKBITS = 32773;
0072:            public static final int COMP_DEFLATE = 32946;
0073:
0074:            // Image types
0075:            private static final int TYPE_UNSUPPORTED = -1;
0076:            private static final int TYPE_BILEVEL = 0;
0077:            private static final int TYPE_GRAY_4BIT = 1;
0078:            private static final int TYPE_GRAY = 2;
0079:            private static final int TYPE_GRAY_ALPHA = 3;
0080:            private static final int TYPE_PALETTE = 4;
0081:            private static final int TYPE_RGB = 5;
0082:            private static final int TYPE_RGB_ALPHA = 6;
0083:            private static final int TYPE_YCBCR_SUB = 7;
0084:            private static final int TYPE_GENERIC = 8;
0085:            private static final int TYPE_CMYK = 9;
0086:
0087:            // Incidental tags
0088:            private static final int TIFF_JPEG_TABLES = 347;
0089:            private static final int TIFF_YCBCR_SUBSAMPLING = 530;
0090:
0091:            SeekableStream stream;
0092:            private boolean isTiled;
0093:            int tileSize;
0094:            int tilesX, tilesY;
0095:            long[] tileOffsets;
0096:            long[] tileByteCounts;
0097:            char[] colormap;
0098:            int sampleSize;
0099:            int compression;
0100:            byte[] palette;
0101:            int numBands;
0102:
0103:            int chromaSubH;
0104:            int chromaSubV;
0105:
0106:            // Fax compression related variables
0107:            long tiffT4Options;
0108:            long tiffT6Options;
0109:            int fillOrder;
0110:
0111:            // LZW compression related variable
0112:            int predictor;
0113:
0114:            // TTN2 JPEG related variables
0115:            JPEGDecodeParam decodeParam = null;
0116:            boolean colorConvertJPEG = false;
0117:
0118:            // DEFLATE variables
0119:            Inflater inflater = null;
0120:
0121:            // Endian-ness indicator
0122:            boolean isBigEndian;
0123:
0124:            int imageType;
0125:            boolean isWhiteZero = false;
0126:            int dataType;
0127:
0128:            boolean decodePaletteAsShorts;
0129:
0130:            // Decoders
0131:            private TIFFFaxDecoder decoder = null;
0132:            private TIFFLZWDecoder lzwDecoder = null;
0133:
0134:            /**
0135:             * Decode a buffer of data into a Raster with the specified location.
0136:             *
0137:             * @param data buffer containing an interchange or abbreviated JPEG datastream.
0138:             * @param decodeParam decoding parameters; may be null unless the
0139:             *        data buffer contains an abbreviated datastream, in which case
0140:             *        it must not be null or an error will occur.
0141:             * @param colorConvert whether to perform color conversion; in this
0142:             *        case that would be limited to YCbCr-to-RGB.
0143:             * @param minX the X position of the returned Raster.
0144:             * @param minY the Y position of the returned Raster.
0145:             */
0146:            private static final Raster decodeJPEG(byte[] data,
0147:                    JPEGDecodeParam decodeParam, boolean colorConvert,
0148:                    int minX, int minY) {
0149:                // Create an InputStream from the compressed data array.
0150:                ByteArrayInputStream jpegStream = new ByteArrayInputStream(data);
0151:
0152:                // Create a decoder.
0153:                JPEGImageDecoder decoder = decodeParam == null ? JPEGCodec
0154:                        .createJPEGDecoder(jpegStream) : JPEGCodec
0155:                        .createJPEGDecoder(jpegStream, decodeParam);
0156:
0157:                // Decode the compressed data into a Raster.
0158:                Raster jpegRaster = null;
0159:                try {
0160:                    jpegRaster = colorConvert ? decoder.decodeAsBufferedImage()
0161:                            .getWritableTile(0, 0) : decoder.decodeAsRaster();
0162:                } catch (IOException ioe) {
0163:                    String message = JaiI18N.getString("TIFFImage13");
0164:                    ImagingListenerProxy.errorOccurred(message,
0165:                            new ImagingException(message, ioe),
0166:                            TIFFImage.class, false);
0167:                    //            throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
0168:                }
0169:
0170:                // Translate the decoded Raster to the specified location and return.
0171:                return jpegRaster.createTranslatedChild(minX, minY);
0172:            }
0173:
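            // The decodeJPEG() helper above relies on com.sun.image.codec.jpeg,
            // a non-standard Sun API that is absent from non-Sun JREs and was
            // removed from later JDKs. The following is an illustrative sketch
            // of an equivalent decode using javax.imageio; it assumes the byte
            // array holds a complete interchange JPEG stream (an abbreviated
            // stream would first need its tables fed to the reader), and the
            // method name is purely illustrative.
            private static Raster decodeJPEGWithImageIO(byte[] data,
                    int minX, int minY) throws IOException {
                javax.imageio.stream.ImageInputStream iis =
                        javax.imageio.ImageIO.createImageInputStream(
                                new ByteArrayInputStream(data));
                java.util.Iterator readers =
                        javax.imageio.ImageIO.getImageReaders(iis);
                if (!readers.hasNext()) {
                    throw new IOException("No JPEG ImageReader available");
                }
                javax.imageio.ImageReader reader =
                        (javax.imageio.ImageReader) readers.next();
                reader.setInput(iis);
                // readRaster() skips color conversion; read(0) would return a
                // color-converted BufferedImage instead.
                Raster raster = reader.readRaster(0, null);
                reader.dispose();
                return raster.createTranslatedChild(minX, minY);
            }
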
0174:            /**
0175:             * Inflates <code>deflated</code> into <code>inflated</code> using the
0176:             * <code>Inflater</code> constructed during class instantiation.
0177:             */
0178:            private final void inflate(byte[] deflated, byte[] inflated) {
0179:                inflater.setInput(deflated);
0180:                try {
0181:                    inflater.inflate(inflated);
0182:                } catch (DataFormatException dfe) {
0183:                    String message = JaiI18N.getString("TIFFImage17");
0184:                    ImagingListenerProxy.errorOccurred(message,
0185:                            new ImagingException(message, dfe), this , false);
0186:                    //            throw new RuntimeException(JaiI18N.getString("TIFFImage17")+": "+
0187:                    //                                       dfe.getMessage());
0188:                }
0189:                inflater.reset();
0190:            }
0191:
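            // A single Inflater.inflate() call is not guaranteed to fill the
            // output array. The following illustrative variant, assuming the
            // same inflater field and message keys used above, loops until the
            // buffer is full or the compressed stream is exhausted.
            private final void inflateFully(byte[] deflated, byte[] inflated) {
                inflater.setInput(deflated);
                try {
                    int offset = 0;
                    while (offset < inflated.length && !inflater.finished()) {
                        int n = inflater.inflate(inflated, offset,
                                inflated.length - offset);
                        if (n == 0 && inflater.needsInput()) {
                            break; // no more compressed input available
                        }
                        offset += n;
                    }
                } catch (DataFormatException dfe) {
                    String message = JaiI18N.getString("TIFFImage17");
                    ImagingListenerProxy.errorOccurred(message,
                            new ImagingException(message, dfe), this, false);
                }
                inflater.reset();
            }
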
0192:            /**
0193:             * Creates a pixel-interleaved <code>SampleModel</code>. This is a hack
0194:             * to work around a cast exception when using JAI with float data.
0195:             */
0196:            private final static SampleModel createPixelInterleavedSampleModel(
0197:                    int dataType, int tileWidth, int tileHeight,
0198:                    int pixelStride, int scanlineStride, int bandOffsets[]) {
0199:                SampleModel sampleModel = null;
0200:
0201:                if (dataType == DataBuffer.TYPE_FLOAT) {
0202:                    // This is a hack to make this work with JAI which in some
0203:                    // cases downcasts the DataBuffer to a type-specific class.
0204:                    // In the case of float data this currently means the JAI class
0205:                    // javax.media.jai.DataBufferFloat.
0206:                    try {
0207:                        Class rfClass = Class
0208:                                .forName("javax.media.jai.RasterFactory");
0209:                        Class[] paramTypes = new Class[] { int.class,
0210:                                int.class, int.class, int.class, int.class,
0211:                                int[].class };
0212:                        Method rfMthd = rfClass
0213:                                .getMethod("createPixelInterleavedSampleModel",
0214:                                        paramTypes);
0215:                        Object[] params = new Object[] { new Integer(dataType),
0216:                                new Integer(tileWidth),
0217:                                new Integer(tileHeight),
0218:                                new Integer(pixelStride),
0219:                                new Integer(scanlineStride), bandOffsets };
0220:                        sampleModel = (SampleModel) rfMthd.invoke(null, params);
0221:                    } catch (Exception e) {
0222:                        // Deliberately ignore the Exception.
0223:                    }
0224:                }
0225:
0226:                // Create a SampleModel for non-float data or, in the case of
0227:                // float data, if it is still null. This latter case should occur
0228:                // if and only if the decoder is being used without JAI.
0229:                if (dataType != DataBuffer.TYPE_FLOAT || sampleModel == null) {
0230:                    sampleModel = RasterFactory
0231:                            .createPixelInterleavedSampleModel(dataType,
0232:                                    tileWidth, tileHeight, pixelStride,
0233:                                    scanlineStride, bandOffsets);
0234:                }
0235:
0236:                return sampleModel;
0237:            }
0238:
0239:            /**
0240:             * Return as a long[] the value of a TIFF_LONG or TIFF_SHORT field.
0241:             */
0242:            private final long[] getFieldAsLongs(TIFFField field) {
0243:                long[] value = null;
0244:
0245:                if (field.getType() == TIFFField.TIFF_SHORT) {
0246:                    char[] charValue = field.getAsChars();
0247:                    value = new long[charValue.length];
0248:                    for (int i = 0; i < charValue.length; i++) {
0249:                        value[i] = charValue[i] & 0xffff;
0250:                    }
0251:                } else if (field.getType() == TIFFField.TIFF_LONG) {
0252:                    value = field.getAsLongs();
0253:                } else {
0254:                    throw new RuntimeException();
0255:                }
0256:
0257:                return value;
0258:            }
0259:
0260:            /*
0261:             * Check whether the specified tag exists in the specified
0262:             * TIFFDirectory. If not, throw an error message. Otherwise
0263:             * return the TIFFField.
0264:             */
0265:            private TIFFField getField(TIFFDirectory dir, int tagID,
0266:                    String tagName) {
0267:                TIFFField field = dir.getField(tagID);
0268:                if (field == null) {
0269:                    MessageFormat mf = new MessageFormat(JaiI18N
0270:                            .getString("TIFFImage5"));
0271:                    mf.setLocale(Locale.getDefault());
0272:                    throw new RuntimeException(mf
0273:                            .format(new Object[] { tagName }));
0274:                } else {
0275:                    return field;
0276:                }
0277:            }
0278:
0279:            /**
0280:             * Constructs a TIFFImage that acquires its data from a given
0281:             * SeekableStream and reads from a particular IFD of the stream.
0282:             * The index of the first IFD is 0.
0283:             *
0284:             * @param stream the SeekableStream to read from.
0285:             * @param param an instance of TIFFDecodeParam, or null.
0286:             * @param directory the index of the IFD to read from.
0287:             */
0288:            public TIFFImage(SeekableStream stream, TIFFDecodeParam param,
0289:                    int directory) throws IOException {
0290:
0291:                this .stream = stream;
0292:                if (param == null) {
0293:                    param = new TIFFDecodeParam();
0294:                }
0295:
0296:                decodePaletteAsShorts = param.getDecodePaletteAsShorts();
0297:
0298:                // Read the specified directory.
0299:                TIFFDirectory dir = param.getIFDOffset() == null ? new TIFFDirectory(
0300:                        stream, directory)
0301:                        : new TIFFDirectory(stream, param.getIFDOffset()
0302:                                .longValue(), directory);
0303:
0304:                // Set a property "tiff_directory".
0305:                properties.put("tiff_directory", dir);
0306:
0307:                // Get the number of samples per pixel
0308:                TIFFField sfield = dir
0309:                        .getField(TIFFImageDecoder.TIFF_SAMPLES_PER_PIXEL);
0310:                int samplesPerPixel = sfield == null ? 1 : (int) sfield
0311:                        .getAsLong(0);
0312:
0313:                // Read the TIFF_PLANAR_CONFIGURATION field
0314:                TIFFField planarConfigurationField = dir
0315:                        .getField(TIFFImageDecoder.TIFF_PLANAR_CONFIGURATION);
0316:                char[] planarConfiguration = planarConfigurationField == null ? new char[] { 1 }
0317:                        : planarConfigurationField.getAsChars();
0318:
0319:                // Support planar format (band sequential) only for 1 sample/pixel.
0320:                if (planarConfiguration[0] != 1 && samplesPerPixel != 1) {
0321:                    throw new RuntimeException(JaiI18N.getString("TIFFImage0"));
0322:                }
0323:
0324:                // Read the TIFF_BITS_PER_SAMPLE field
0325:                TIFFField bitsField = dir
0326:                        .getField(TIFFImageDecoder.TIFF_BITS_PER_SAMPLE);
0327:                char[] bitsPerSample = null;
0328:                if (bitsField != null) {
0329:                    bitsPerSample = bitsField.getAsChars();
0330:
0331:                    // Ensure that all samples have the same bit depth.
0332:                    for (int i = 1; i < bitsPerSample.length; i++) {
0333:                        if (bitsPerSample[i] != bitsPerSample[0]) {
0334:                            throw new RuntimeException(JaiI18N
0335:                                    .getString("TIFFImage1"));
0336:                        }
0337:                    }
0338:                } else {
0339:                    bitsPerSample = new char[] { 1 };
0340:                }
0341:                sampleSize = (int) bitsPerSample[0];
0342:
0343:                // Read the TIFF_SAMPLE_FORMAT tag to see whether the data might be
0344:                // signed or floating point
0345:                TIFFField sampleFormatField = dir
0346:                        .getField(TIFFImageDecoder.TIFF_SAMPLE_FORMAT);
0347:
0348:                char[] sampleFormat = null;
0349:                if (sampleFormatField != null) {
0350:                    sampleFormat = sampleFormatField.getAsChars();
0351:
0352:                    // Check that all the samples have the same format
0353:                    for (int l = 1; l < sampleFormat.length; l++) {
0354:                        if (sampleFormat[l] != sampleFormat[0]) {
0355:                            throw new RuntimeException(JaiI18N
0356:                                    .getString("TIFFImage2"));
0357:                        }
0358:                    }
0359:
0360:                } else {
0361:                    sampleFormat = new char[] { 1 };
0362:                }
0363:
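                // TIFF SampleFormat (tag 339) values: 1 = unsigned integer,
                // 2 = two's complement signed integer, 3 = IEEE floating point,
                // 4 = undefined. The default when the tag is absent is 1,
                // hence the { 1 } fallback above.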
0364:                // Set the data type based on the sample size and format.
0365:                boolean isValidDataFormat = false;
0366:                switch (sampleSize) {
0367:                case 1:
0368:                case 4:
0369:                case 8:
0370:                    if (sampleFormat[0] != 3) {
0371:                        // Ignore whether signed or unsigned: treat all as unsigned.
0372:                        dataType = DataBuffer.TYPE_BYTE;
0373:                        isValidDataFormat = true;
0374:                    }
0375:                    break;
0376:                case 16:
0377:                    if (sampleFormat[0] != 3) {
0378:                        dataType = sampleFormat[0] == 2 ? DataBuffer.TYPE_SHORT
0379:                                : DataBuffer.TYPE_USHORT;
0380:                        isValidDataFormat = true;
0381:                    }
0382:                    break;
0383:                case 32:
0384:                    dataType = sampleFormat[0] == 3 ? DataBuffer.TYPE_FLOAT
0385:                            : DataBuffer.TYPE_INT;
0386:                    isValidDataFormat = true;
0387:                    break;
0388:                }
0389:
0390:                if (!isValidDataFormat) {
0391:                    throw new RuntimeException(JaiI18N.getString("TIFFImage3"));
0392:                }
0393:
0394:                // Figure out what compression, if any, is being used.
0395:                TIFFField compField = dir
0396:                        .getField(TIFFImageDecoder.TIFF_COMPRESSION);
0397:                compression = compField == null ? COMP_NONE : compField
0398:                        .getAsInt(0);
0399:
0400:                // Get the photometric interpretation field.
0401:                TIFFField photoInterpField = dir
0402:                        .getField(TIFFImageDecoder.TIFF_PHOTOMETRIC_INTERPRETATION);
0403:
0404:                // Set the photometric interpretation variable.
0405:                int photometricType;
0406:                if (photoInterpField != null) {
0407:                    // Set the variable from the photometric interpretation field.
0408:                    photometricType = (int) photoInterpField.getAsLong(0);
0409:                } else {
0410:                    // The photometric interpretation field is missing; attempt
0411:                    // to infer the type from other information.
0412:                    if (dir.getField(TIFFImageDecoder.TIFF_COLORMAP) != null) {
0413:                        // There is a colormap so most likely a palette color image.
0414:                        photometricType = 3; // RGB Palette
0415:                    } else if (sampleSize == 1) {
0416:                        // Bilevel image so most likely a document; switch based
0417:                        // on the compression type of the image.
0418:                        if (compression == COMP_FAX_G3_1D
0419:                                || compression == COMP_FAX_G3_2D
0420:                                || compression == COMP_FAX_G4_2D) {
0421:                            photometricType = 0; // WhiteIsZero
0422:                        } else {
0423:                            photometricType = 1; // BlackIsZero
0424:                        }
0425:                    } else if (samplesPerPixel == 3 || samplesPerPixel == 4) {
0426:                        // Assume 3 bands is RGB and 4 bands is RGBA.
0427:                        photometricType = 2; // RGB
0428:                    } else {
0429:                        // Default to multi-band grayscale.
0430:                        photometricType = 1; // BlackIsZero
0431:                    }
0432:                }
0433:
0434:                // Determine which kind of image we are dealing with.
0435:                imageType = TYPE_UNSUPPORTED;
0436:                switch (photometricType) {
0437:                case 0: // WhiteIsZero
0438:                    isWhiteZero = true;
0439:                case 1: // BlackIsZero
0440:                    if (sampleSize == 1 && samplesPerPixel == 1) {
0441:                        imageType = TYPE_BILEVEL;
0442:                    } else if (sampleSize == 4 && samplesPerPixel == 1) {
0443:                        imageType = TYPE_GRAY_4BIT;
0444:                    } else if (sampleSize % 8 == 0) {
0445:                        if (samplesPerPixel == 1) {
0446:                            imageType = TYPE_GRAY;
0447:                        } else if (samplesPerPixel == 2) {
0448:                            imageType = TYPE_GRAY_ALPHA;
0449:                        } else {
0450:                            imageType = TYPE_GENERIC;
0451:                        }
0452:                    }
0453:                    break;
0454:                case 2: // RGB
0455:                    if (sampleSize % 8 == 0) {
0456:                        if (samplesPerPixel == 3) {
0457:                            imageType = TYPE_RGB;
0458:                        } else if (samplesPerPixel == 4) {
0459:                            imageType = TYPE_RGB_ALPHA;
0460:                        } else {
0461:                            imageType = TYPE_GENERIC;
0462:                        }
0463:                    }
0464:                    break;
0465:                case 3: // RGB Palette
0466:                    if (samplesPerPixel == 1
0467:                            && (sampleSize == 4 || sampleSize == 8 || sampleSize == 16)) {
0468:                        imageType = TYPE_PALETTE;
0469:                    }
0470:                    break;
0471:                case 4: // Transparency mask
0472:                    if (sampleSize == 1 && samplesPerPixel == 1) {
0473:                        imageType = TYPE_BILEVEL;
0474:                    }
0475:                    break;
0476:                case 5: // Separated image, usually CMYK
0477:                    if (sampleSize == 8 && samplesPerPixel == 4) {
0478:                        imageType = TYPE_CMYK;
0479:                    }
0480:                case 6: // YCbCr
0481:                    if (compression == COMP_JPEG_TTN2 && sampleSize == 8
0482:                            && samplesPerPixel == 3) {
0483:                        // Set color conversion flag.
0484:                        colorConvertJPEG = param.getJPEGDecompressYCbCrToRGB();
0485:
0486:                        // Set type to RGB if color converting.
0487:                        imageType = colorConvertJPEG ? TYPE_RGB : TYPE_GENERIC;
0488:                    } else {
0489:                        TIFFField chromaField = dir
0490:                                .getField(TIFF_YCBCR_SUBSAMPLING);
0491:                        if (chromaField != null) {
0492:                            chromaSubH = chromaField.getAsInt(0);
0493:                            chromaSubV = chromaField.getAsInt(1);
0494:                        } else {
0495:                            chromaSubH = chromaSubV = 2;
0496:                        }
0497:
0498:                        if (chromaSubH * chromaSubV == 1) {
0499:                            imageType = TYPE_GENERIC;
0500:                        } else if (sampleSize == 8 && samplesPerPixel == 3) {
0501:                            imageType = TYPE_YCBCR_SUB;
0502:                        }
0503:                    }
0504:                    break;
0505:                default: // Other including CIE L*a*b*, unknown.
0506:                    if (sampleSize % 8 == 0) {
0507:                        imageType = TYPE_GENERIC;
0508:                    }
0509:                }
0510:
0511:                // Bail out if not one of the supported types.
0512:                if (imageType == TYPE_UNSUPPORTED) {
0513:                    throw new RuntimeException(JaiI18N.getString("TIFFImage4"));
0514:                }
0515:
0516:                // Set basic image layout
0517:                minX = minY = 0;
0518:                width = (int) (getField(dir, TIFFImageDecoder.TIFF_IMAGE_WIDTH,
0519:                        "Image Width").getAsLong(0));
0520:
0521:                height = (int) (getField(dir,
0522:                        TIFFImageDecoder.TIFF_IMAGE_LENGTH, "Image Length")
0523:                        .getAsLong(0));
0524:
0525:                // Set a preliminary band count. This may be changed later as needed.
0526:                numBands = samplesPerPixel;
0527:
0528:                // Figure out if any extra samples are present.
0529:                TIFFField efield = dir
0530:                        .getField(TIFFImageDecoder.TIFF_EXTRA_SAMPLES);
0531:                int extraSamples = efield == null ? 0 : (int) efield
0532:                        .getAsLong(0);
0533:
0534:                if (dir.getField(TIFFImageDecoder.TIFF_TILE_OFFSETS) != null) {
0535:                    // Image is in tiled format
0536:                    isTiled = true;
0537:
0538:                    tileWidth = (int) (getField(dir,
0539:                            TIFFImageDecoder.TIFF_TILE_WIDTH, "Tile Width")
0540:                            .getAsLong(0));
0541:                    tileHeight = (int) (getField(dir,
0542:                            TIFFImageDecoder.TIFF_TILE_LENGTH, "Tile Length")
0543:                            .getAsLong(0));
0544:                    tileOffsets = (getField(dir,
0545:                            TIFFImageDecoder.TIFF_TILE_OFFSETS, "Tile Offsets"))
0546:                            .getAsLongs();
0547:
0548:                    tileByteCounts = getFieldAsLongs(getField(dir,
0549:                            TIFFImageDecoder.TIFF_TILE_BYTE_COUNTS,
0550:                            "Tile Byte Counts"));
0551:
0552:                } else {
0553:
0554:                    // Image is in strip format; strips are treated as tiles here
0555:                    isTiled = false;
0556:
0557:                    // Note: Some legacy files may have tile width and height
0558:                    // written but use the strip offsets and byte counts fields
0559:                    // instead of the tile offsets and byte counts. Therefore
0560:                    // we default here to the tile dimensions if they are written.
0561:                    tileWidth = dir.getField(TIFFImageDecoder.TIFF_TILE_WIDTH) != null ? (int) dir
0562:                            .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_WIDTH)
0563:                            : width;
0564:                    TIFFField field = dir
0565:                            .getField(TIFFImageDecoder.TIFF_ROWS_PER_STRIP);
0566:                    if (field == null) {
0567:                        // Default is infinity (2^32 -1), basically the entire image
0568:                        // TODO: Can do a better job of tiling here
0569:                        tileHeight = dir
0570:                                .getField(TIFFImageDecoder.TIFF_TILE_LENGTH) != null ? (int) dir
0571:                                .getFieldAsLong(TIFFImageDecoder.TIFF_TILE_LENGTH)
0572:                                : height;
0573:                    } else {
0574:                        long l = field.getAsLong(0);
0575:                        long infinity = 1;
0576:                        infinity = (infinity << 32) - 1;
0577:                        if (l == infinity || l > height) {
0578:                            // 2^32 - 1 (effectively infinity, entire image is 1 strip)
0579:                            // or RowsPerStrip > ImageLength so clamp as having a tile
0580:                            // larger than the image is pointless.
0581:                            tileHeight = height;
0582:                        } else {
0583:                            tileHeight = (int) l;
0584:                        }
0585:                    }
0586:
0587:                    TIFFField tileOffsetsField = getField(dir,
0588:                            TIFFImageDecoder.TIFF_STRIP_OFFSETS,
0589:                            "Strip Offsets");
0590:                    tileOffsets = getFieldAsLongs(tileOffsetsField);
0591:
0592:                    TIFFField tileByteCountsField = dir
0593:                            .getField(TIFFImageDecoder.TIFF_STRIP_BYTE_COUNTS);
0594:                    if (tileByteCountsField == null) {
0595:                        // Attempt to infer the number of bytes in each strip.
0596:                        int totalBytes = ((sampleSize + 7) / 8) * numBands
0597:                                * width * height;
0598:                        int bytesPerStrip = ((sampleSize + 7) / 8) * numBands
0599:                                * width * tileHeight;
0600:                        int cumulativeBytes = 0;
0601:                        tileByteCounts = new long[tileOffsets.length];
0602:                        for (int i = 0; i < tileOffsets.length; i++) {
0603:                            tileByteCounts[i] = Math.min(totalBytes
0604:                                    - cumulativeBytes, bytesPerStrip);
0605:                            cumulativeBytes += bytesPerStrip;
0606:                        }
0607:
0608:                        if (compression != COMP_NONE) {
0609:                            // Replace the stream with one that will not throw
0610:                            // an EOFException when it runs past the end.
0611:                            this .stream = new NoEOFStream(stream);
0612:                        }
0613:                    } else {
0614:                        tileByteCounts = getFieldAsLongs(tileByteCountsField);
0615:                    }
0616:
0617:                    // Uncompressed image provided in a single tile: clamp to max bytes.
0618:                    int maxBytes = width * height * numBands
0619:                            * ((sampleSize + 7) / 8);
0620:                    if (tileByteCounts.length == 1 && compression == COMP_NONE
0621:                            && tileByteCounts[0] > maxBytes) {
0622:                        tileByteCounts[0] = maxBytes;
0623:                    }
0624:                }
0625:
0626:                // Calculate number of tiles and the tileSize in bytes
0627:                tilesX = (width + tileWidth - 1) / tileWidth;
0628:                tilesY = (height + tileHeight - 1) / tileHeight;
0629:                tileSize = tileWidth * tileHeight * numBands;
0630:
0631:                // Check whether big endian or little endian format is used.
0632:                isBigEndian = dir.isBigEndian();
0633:
0634:                TIFFField fillOrderField = dir
0635:                        .getField(TIFFImageDecoder.TIFF_FILL_ORDER);
0636:                if (fillOrderField != null) {
0637:                    fillOrder = fillOrderField.getAsInt(0);
0638:                } else {
0639:                    // Default Fill Order
0640:                    fillOrder = 1;
0641:                }
0642:
0643:                switch (compression) {
0644:                case COMP_NONE:
0645:                case COMP_PACKBITS:
0646:                    // Do nothing.
0647:                    break;
0648:                case COMP_DEFLATE:
0649:                    inflater = new Inflater();
0650:                    break;
0651:                case COMP_FAX_G3_1D:
0652:                case COMP_FAX_G3_2D:
0653:                case COMP_FAX_G4_2D:
0654:                    if (sampleSize != 1) {
0655:                        throw new RuntimeException(JaiI18N
0656:                                .getString("TIFFImage7"));
0657:                    }
0658:
0659:                    // Fax T.4 compression options
0660:                    if (compression == 3) {
0661:                        TIFFField t4OptionsField = dir
0662:                                .getField(TIFFImageDecoder.TIFF_T4_OPTIONS);
0663:                        if (t4OptionsField != null) {
0664:                            tiffT4Options = t4OptionsField.getAsLong(0);
0665:                        } else {
0666:                            // Use default value
0667:                            tiffT4Options = 0;
0668:                        }
0669:                    }
0670:
0671:                    // Fax T.6 compression options
0672:                    if (compression == 4) {
0673:                        TIFFField t6OptionsField = dir
0674:                                .getField(TIFFImageDecoder.TIFF_T6_OPTIONS);
0675:                        if (t6OptionsField != null) {
0676:                            tiffT6Options = t6OptionsField.getAsLong(0);
0677:                        } else {
0678:                            // Use default value
0679:                            tiffT6Options = 0;
0680:                        }
0681:                    }
0682:
0683:                    // Fax encoding, need to create the Fax decoder.
0684:                    decoder = new TIFFFaxDecoder(fillOrder, tileWidth,
0685:                            tileHeight);
0686:                    break;
0687:
0688:                case COMP_LZW:
0689:                    // LZW compression used, need to create the LZW decoder.
0690:                    TIFFField predictorField = dir
0691:                            .getField(TIFFImageDecoder.TIFF_PREDICTOR);
0692:
0693:                    if (predictorField == null) {
0694:                        predictor = 1;
0695:                    } else {
0696:                        predictor = predictorField.getAsInt(0);
0697:
0698:                        if (predictor != 1 && predictor != 2) {
0699:                            throw new RuntimeException(JaiI18N
0700:                                    .getString("TIFFImage8"));
0701:                        }
0702:
0703:                        if (predictor == 2 && sampleSize != 8) {
0704:                            throw new RuntimeException(sampleSize
0705:                                    + JaiI18N.getString("TIFFImage9"));
0706:                        }
0707:                    }
0708:
0709:                    lzwDecoder = new TIFFLZWDecoder(tileWidth, predictor,
0710:                            samplesPerPixel);
0711:                    break;
0712:
0713:                case COMP_JPEG_OLD:
0714:                    throw new RuntimeException(JaiI18N.getString("TIFFImage15"));
0715:
0716:                case COMP_JPEG_TTN2:
0717:                    if (!(sampleSize == 8 && ((imageType == TYPE_GRAY && samplesPerPixel == 1)
0718:                            || (imageType == TYPE_PALETTE && samplesPerPixel == 1) || (imageType == TYPE_RGB && samplesPerPixel == 3)))) {
0719:                        throw new RuntimeException(JaiI18N
0720:                                .getString("TIFFImage16"));
0721:                    }
0722:
0723:                    // Create decodeParam from JPEGTables field if present.
0724:                    if (dir.isTagPresent(TIFF_JPEG_TABLES)) {
0725:                        TIFFField jpegTableField = dir
0726:                                .getField(TIFF_JPEG_TABLES);
0727:                        byte[] jpegTable = jpegTableField.getAsBytes();
0728:                        ByteArrayInputStream tableStream = new ByteArrayInputStream(
0729:                                jpegTable);
0730:                        JPEGImageDecoder decoder = JPEGCodec
0731:                                .createJPEGDecoder(tableStream);
0732:                        decoder.decodeAsRaster();
0733:                        decodeParam = decoder.getJPEGDecodeParam();
0734:                    }
0735:
0736:                    break;
0737:                default:
0738:                    throw new RuntimeException(JaiI18N.getString("TIFFImage10"));
0739:                }
0740:
0741:                switch (imageType) {
0742:                case TYPE_BILEVEL:
0743:                case TYPE_GRAY_4BIT:
0744:                    sampleModel = new MultiPixelPackedSampleModel(dataType,
0745:                            tileWidth, tileHeight, sampleSize);
0746:                    if (imageType == TYPE_BILEVEL) {
0747:                        byte[] map = new byte[] {
0748:                                (byte) (isWhiteZero ? 255 : 0),
0749:                                (byte) (isWhiteZero ? 0 : 255) };
0750:                        colorModel = new IndexColorModel(1, 2, map, map, map);
0751:                    } else {
0752:                        colorModel = ImageCodec.createGrayIndexColorModel(
0753:                                sampleModel, !isWhiteZero);
0754:                    }
0755:                    break;
0756:
0757:                case TYPE_GRAY:
0758:                case TYPE_GRAY_ALPHA:
0759:                case TYPE_RGB:
0760:                case TYPE_RGB_ALPHA:
0761:                case TYPE_CMYK:
0762:                    // Create a pixel interleaved SampleModel with decreasing
0763:                    // band offsets.
0764:                    int[] RGBOffsets = new int[numBands];
0765:                    if (compression == COMP_JPEG_TTN2) {
0766:                        for (int i = 0; i < numBands; i++) {
0767:                            RGBOffsets[i] = numBands - 1 - i;
0768:                        }
0769:                    } else {
0770:                        for (int i = 0; i < numBands; i++) {
0771:                            RGBOffsets[i] = i;
0772:                        }
0773:                    }
0774:                    sampleModel = createPixelInterleavedSampleModel(dataType,
0775:                            tileWidth, tileHeight, numBands, numBands
0776:                                    * tileWidth, RGBOffsets);
0777:
0778:                    if (imageType == TYPE_GRAY || imageType == TYPE_RGB) {
0779:                        colorModel = ImageCodec
0780:                                .createComponentColorModel(sampleModel);
0781:                    } else if (imageType == TYPE_CMYK) {
0782:                        colorModel = ImageCodec
0783:                                .createComponentColorModel(sampleModel,
0784:                                        SimpleCMYKColorSpace.getInstance());
0785:                    } else { // hasAlpha
0786:                        // Transparency.OPAQUE signifies image data that is
0787:                        // completely opaque, meaning that all pixels have an alpha
0788:                        // value of 1.0. So the extra band gets ignored, which is
0789:                        // what we want.
0790:                        int transparency = Transparency.OPAQUE;
0791:                        if (extraSamples == 1 || extraSamples == 2) {
0792:                            // associated (premultiplied) alpha when == 1
0793:                            // unassociated alpha when ==2
0794:                            // Fix bug: 4699316
0795:                            transparency = Transparency.TRANSLUCENT;
0796:                        }
0797:
0798:                        colorModel = createAlphaComponentColorModel(dataType,
0799:                                numBands, extraSamples == 1, transparency);
0800:                    }
0801:                    break;
0802:
0803:                case TYPE_GENERIC:
0804:                case TYPE_YCBCR_SUB:
0805:                    // For this case we can't display the image, so we create a
0806:                    // SampleModel with increasing bandOffsets, and keep the
0807:                    // ColorModel as null, as there is no appropriate ColorModel.
0808:
0809:                    int[] bandOffsets = new int[numBands];
0810:                    for (int i = 0; i < numBands; i++) {
0811:                        bandOffsets[i] = i;
0812:                    }
0813:
0814:                    sampleModel = createPixelInterleavedSampleModel(dataType,
0815:                            tileWidth, tileHeight, numBands, numBands
0816:                                    * tileWidth, bandOffsets);
0817:                    colorModel = null;
0818:                    break;
0819:
0820:                case TYPE_PALETTE:
0821:                    // Get the colormap
0822:                    TIFFField cfield = getField(dir,
0823:                            TIFFImageDecoder.TIFF_COLORMAP, "Colormap");
0824:                    colormap = cfield.getAsChars();
0825:
0826:                    // Could be either 1 or 3 bands depending on whether we use
0827:                    // IndexColorModel or not.
0828:                    if (decodePaletteAsShorts) {
0829:                        numBands = 3;
0830:
0831:                        // If no SampleFormat tag was specified and if the
0832:                        // sampleSize is less than or equal to 8, then the
0833:                        // dataType was initially set to byte, but now we want to
0834:                        // expand the palette as shorts, so the dataType should
0835:                        // be ushort.
0836:                        if (dataType == DataBuffer.TYPE_BYTE) {
0837:                            dataType = DataBuffer.TYPE_USHORT;
0838:                        }
0839:
0840:                        // Data will have to be unpacked into a 3-band short image
0841:                        // as we do not have an IndexColorModel that can deal with
0842:                        // a colormap whose entries are of short data type.
0843:                        sampleModel = RasterFactory
0844:                                .createPixelInterleavedSampleModel(dataType,
0845:                                        tileWidth, tileHeight, numBands);
0846:                        colorModel = ImageCodec
0847:                                .createComponentColorModel(sampleModel);
0848:
0849:                    } else {
0850:
0851:                        numBands = 1;
0852:
0853:                        if (sampleSize == 4) {
0854:                            // Pixel data will not be unpacked, will use MPPSM to store
0855:                            // packed data and IndexColorModel to do the unpacking.
0856:                            sampleModel = new MultiPixelPackedSampleModel(
0857:                                    DataBuffer.TYPE_BYTE, tileWidth,
0858:                                    tileHeight, sampleSize);
0859:                        } else if (sampleSize == 8) {
0860:                            sampleModel = RasterFactory
0861:                                    .createPixelInterleavedSampleModel(
0862:                                            DataBuffer.TYPE_BYTE, tileWidth,
0863:                                            tileHeight, numBands);
0864:                        } else if (sampleSize == 16) {
0865:
0866:                            // Here the data type has to be unsigned since we are storing
0867:                            // indices into the IndexColorModel palette. Of course
0868:                            // the actual palette entries are allowed to be negative.
0869:                            dataType = DataBuffer.TYPE_USHORT;
0870:                            sampleModel = RasterFactory
0871:                                    .createPixelInterleavedSampleModel(
0872:                                            DataBuffer.TYPE_USHORT, tileWidth,
0873:                                            tileHeight, numBands);
0874:                        }
0875:
0876:                        int bandLength = colormap.length / 3;
0877:                        byte r[] = new byte[bandLength];
0878:                        byte g[] = new byte[bandLength];
0879:                        byte b[] = new byte[bandLength];
0880:
0881:                        int gIndex = bandLength;
0882:                        int bIndex = bandLength * 2;
0883:
0884:                        if (dataType == DataBuffer.TYPE_SHORT) {
0885:
0886:                            for (int i = 0; i < bandLength; i++) {
0887:                                r[i] = param
0888:                                        .decodeSigned16BitsTo8Bits((short) colormap[i]);
0889:                                g[i] = param
0890:                                        .decodeSigned16BitsTo8Bits((short) colormap[gIndex
0891:                                                + i]);
0892:                                b[i] = param
0893:                                        .decodeSigned16BitsTo8Bits((short) colormap[bIndex
0894:                                                + i]);
0895:                            }
0896:
0897:                        } else {
0898:
0899:                            for (int i = 0; i < bandLength; i++) {
0900:                                r[i] = param
0901:                                        .decode16BitsTo8Bits(colormap[i] & 0xffff);
0902:                                g[i] = param
0903:                                        .decode16BitsTo8Bits(colormap[gIndex
0904:                                                + i] & 0xffff);
0905:                                b[i] = param
0906:                                        .decode16BitsTo8Bits(colormap[bIndex
0907:                                                + i] & 0xffff);
0908:                            }
0909:
0910:                        }
0911:
0912:                        colorModel = new IndexColorModel(sampleSize,
0913:                                bandLength, r, g, b);
0914:                    }
0915:                    break;
0916:
0917:                default:
0918:                    throw new RuntimeException(JaiI18N.getString("TIFFImage4"));
0919:                }
0920:            }
0921:
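            // Illustrative usage sketch: the constructor above does all of the
            // layout and color model work up front; pixel data is only read
            // when getTile() is called. "input.tif" is a placeholder path and
            // FileSeekableStream is assumed from com.sun.media.jai.codec;
            // stream closing and error handling are omitted for brevity.
            static Raster readFirstTile() throws IOException {
                SeekableStream ss = new com.sun.media.jai.codec.FileSeekableStream(
                        "input.tif");
                TIFFDecodeParam tdp = new TIFFDecodeParam();
                tdp.setDecodePaletteAsShorts(true); // expand 16-bit palettes
                TIFFImage image = new TIFFImage(ss, tdp, 0); // IFD index 0
                return image.getTile(0, 0);
            }
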
0922:            /**
0923:             * Reads a private IFD from a given offset in the stream.  This
0924:             * method may be used to obtain IFDs that are referenced
0925:             * only by private tag values.
0926:             */
0927:            public TIFFDirectory getPrivateIFD(long offset) throws IOException {
0928:                return new TIFFDirectory(stream, offset, 0);
0929:            }
0930:
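            // Illustrative sketch: getPrivateIFD() is typically paired with a
            // private pointer tag read from the primary directory, e.g. the
            // EXIF IFD pointer (tag 34665) assumed here; if the tag is absent
            // there is nothing to follow.
            TIFFDirectory readExifIFD(TIFFDirectory primaryDir)
                    throws IOException {
                TIFFField exifPointer = primaryDir.getField(34665);
                if (exifPointer == null) {
                    return null; // no EXIF sub-IFD recorded in this file
                }
                return getPrivateIFD(exifPointer.getAsLong(0));
            }
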
0931:            /**
0932:             * Returns tile (tileX, tileY) as a Raster.
0933:             */
0934:            public synchronized Raster getTile(int tileX, int tileY) {
0935:                // Check parameters.
0936:                if ((tileX < 0) || (tileX >= tilesX) || (tileY < 0)
0937:                        || (tileY >= tilesY)) {
0938:                    throw new IllegalArgumentException(JaiI18N
0939:                            .getString("TIFFImage12"));
0940:                }
0941:
0942:                // The tile to return.
0943:                WritableRaster tile = null;
0944:
0945:                // Synchronize the rest of the method in case other TIFFImage
0946:                // instances using the same stream were created by the same
0947:                // TIFFImageDecoder. This fixes 4690773.
0948:                synchronized (this .stream) {
0949:
0950:                    // Get the data array out of the DataBuffer
0951:                    byte bdata[] = null;
0952:                    short sdata[] = null;
0953:                    int idata[] = null;
0954:                    float fdata[] = null;
0955:                    DataBuffer buffer = sampleModel.createDataBuffer();
0956:
0957:                    int dataType = sampleModel.getDataType();
0958:                    if (dataType == DataBuffer.TYPE_BYTE) {
0959:                        bdata = ((DataBufferByte) buffer).getData();
0960:                    } else if (dataType == DataBuffer.TYPE_USHORT) {
0961:                        sdata = ((DataBufferUShort) buffer).getData();
0962:                    } else if (dataType == DataBuffer.TYPE_SHORT) {
0963:                        sdata = ((DataBufferShort) buffer).getData();
0964:                    } else if (dataType == DataBuffer.TYPE_INT) {
0965:                        idata = ((DataBufferInt) buffer).getData();
0966:                    } else if (dataType == DataBuffer.TYPE_FLOAT) {
0967:                        if (buffer instanceof  DataBufferFloat) {
0968:                            fdata = ((DataBufferFloat) buffer).getData();
0969:                        } else {
0970:                            // This is a hack to make this work with JAI which in some
0971:                            // cases downcasts the DataBuffer to a type-specific class.
0972:                            // In the case of float data this currently means the JAI class
0973:                            // javax.media.jai.DataBufferFloat.
0974:                            try {
0975:                                Method getDataMethod = buffer.getClass()
0976:                                        .getMethod("getData", null);
0977:                                fdata = (float[]) getDataMethod.invoke(buffer,
0978:                                        null);
0979:                            } catch (Exception e) {
0980:                                String message = JaiI18N
0981:                                        .getString("TIFFImage18");
0982:                                ImagingListenerProxy.errorOccurred(message,
0983:                                        new ImagingException(message, e), this ,
0984:                                        false);
0985:                                //                    throw new RuntimeException(JaiI18N.getString("TIFFImage18"));
0986:                            }
0987:                        }
0988:                    }
0989:
0990:                    tile = (WritableRaster) RasterFactory.createWritableRaster(
0991:                            sampleModel, buffer, new Point(tileXToX(tileX),
0992:                                    tileYToY(tileY)));
0993:
0994:                    // Save original file pointer position and seek to tile data location.
0995:                    long save_offset = 0;
0996:                    try {
0997:                        save_offset = stream.getFilePointer();
0998:                        stream.seek(tileOffsets[tileY * tilesX + tileX]);
0999:                    } catch (IOException ioe) {
1000:                        String message = JaiI18N.getString("TIFFImage13");
1001:                        ImagingListenerProxy
1002:                                .errorOccurred(message, new ImagingException(
1003:                                        message, ioe), this , false);
1004:                        //	    throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1005:                    }
1006:
1007:                    // Number of bytes in this tile (strip) after compression.
1008:                    int byteCount = (int) tileByteCounts[tileY * tilesX + tileX];
1009:
1010:                    // Find out the number of sample units in the current tile. If the image is
1011:                    // tiled this may include pixels which are outside of the image bounds
1012:                    // if the image width and height are not multiples of the tile width
1013:                    // and height respectively.
1014:                    Rectangle tileRect = new Rectangle(tileXToX(tileX),
1015:                            tileYToY(tileY), tileWidth, tileHeight);
1016:                    Rectangle newRect = isTiled ? tileRect : tileRect
1017:                            .intersection(getBounds());
1018:                    int unitsInThisTile = newRect.width * newRect.height
1019:                            * numBands;
1020:
1021:                    // Allocate read buffer if needed.
1022:                    byte data[] = compression != COMP_NONE
1023:                            || imageType == TYPE_PALETTE ? new byte[byteCount]
1024:                            : null;
1025:
1026:                    // Read the data, uncompressing as needed. There are four cases:
1027:                    // bilevel, palette-RGB, 4-bit grayscale, and everything else.
1028:                    if (imageType == TYPE_BILEVEL) { // bilevel
1029:                        try {
1030:                            if (compression == COMP_PACKBITS) {
1031:                                stream.readFully(data, 0, byteCount);
1032:
1033:                                // Since the decompressed data will still be packed
1034:                                // 8 pixels into 1 byte, calculate bytesInThisTile
1035:                                int bytesInThisTile;
1036:                                if ((newRect.width % 8) == 0) {
1037:                                    bytesInThisTile = (newRect.width / 8)
1038:                                            * newRect.height;
1039:                                } else {
1040:                                    bytesInThisTile = (newRect.width / 8 + 1)
1041:                                            * newRect.height;
1042:                                }
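                                // Illustrative example: at 1 bit per pixel a 10-pixel-wide
                                // row still occupies 10 / 8 + 1 = 2 bytes, because each row
                                // is padded out to a whole byte before the next row starts.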
1043:                                decodePackbits(data, bytesInThisTile, bdata);
1044:                            } else if (compression == COMP_LZW) {
1045:                                stream.readFully(data, 0, byteCount);
1046:                                lzwDecoder.decode(data, bdata, newRect.height);
1047:                            } else if (compression == COMP_FAX_G3_1D) {
1048:                                stream.readFully(data, 0, byteCount);
1049:                                decoder
1050:                                        .decode1D(bdata, data, 0,
1051:                                                newRect.height);
1052:                            } else if (compression == COMP_FAX_G3_2D) {
1053:                                stream.readFully(data, 0, byteCount);
1054:                                decoder.decode2D(bdata, data, 0,
1055:                                        newRect.height, tiffT4Options);
1056:                            } else if (compression == COMP_FAX_G4_2D) {
1057:                                stream.readFully(data, 0, byteCount);
1058:                                decoder.decodeT6(bdata, data, 0,
1059:                                        newRect.height, tiffT6Options);
1060:                            } else if (compression == COMP_DEFLATE) {
1061:                                stream.readFully(data, 0, byteCount);
1062:                                inflate(data, bdata);
1063:                            } else if (compression == COMP_NONE) {
1064:                                stream.readFully(bdata, 0, byteCount);
1065:                            }
1066:
1067:                            stream.seek(save_offset);
1068:                        } catch (IOException ioe) {
1069:                            String message = JaiI18N.getString("TIFFImage13");
1070:                            ImagingListenerProxy.errorOccurred(message,
1071:                                    new ImagingException(message, ioe), this,
1072:                                    false);
1073:                            //		throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1074:                        }
1075:                    } else if (imageType == TYPE_PALETTE) { // palette-RGB
1076:                        if (sampleSize == 16) {
1077:
1078:                            if (decodePaletteAsShorts) {
1079:
1080:                                short tempData[] = null;
1081:
1082:                                // At this point the data is 1-banded and will
1083:                                // become 3-banded only after the palette lookup.
1084:                                // Since unitsInThisTile was calculated with 3
1085:                                // bands, we need to divide it by 3.
1086:                                int unitsBeforeLookup = unitsInThisTile / 3;
1087:
1088:                                // Since unitsBeforeLookup is the number of shorts,
1089:                                // but we do our decompression in terms of bytes, we
1090:                                // need to multiply it by 2 in order to figure out
1091:                                // how many bytes we'll get after decompression.
1092:                                int entries = unitsBeforeLookup * 2;
1093:
1094:                                // Read the data; if compressed, decode it; then reset the pointer.
1095:                                try {
1096:
1097:                                    if (compression == COMP_PACKBITS) {
1098:
1099:                                        stream.readFully(data, 0, byteCount);
1100:
1101:                                        byte byteArray[] = new byte[entries];
1102:                                        decodePackbits(data, entries, byteArray);
1103:                                        tempData = new short[unitsBeforeLookup];
1104:                                        interpretBytesAsShorts(byteArray,
1105:                                                tempData, unitsBeforeLookup);
1106:
1107:                                    } else if (compression == COMP_LZW) {
1108:
1109:                                        // Read in all the compressed data for this tile
1110:                                        stream.readFully(data, 0, byteCount);
1111:
1112:                                        byte byteArray[] = new byte[entries];
1113:                                        lzwDecoder.decode(data, byteArray,
1114:                                                newRect.height);
1115:                                        tempData = new short[unitsBeforeLookup];
1116:                                        interpretBytesAsShorts(byteArray,
1117:                                                tempData, unitsBeforeLookup);
1118:
1119:                                    } else if (compression == COMP_DEFLATE) {
1120:
1121:                                        stream.readFully(data, 0, byteCount);
1122:                                        byte byteArray[] = new byte[entries];
1123:                                        inflate(data, byteArray);
1124:                                        tempData = new short[unitsBeforeLookup];
1125:                                        interpretBytesAsShorts(byteArray,
1126:                                                tempData, unitsBeforeLookup);
1127:
1128:                                    } else if (compression == COMP_NONE) {
1129:
1130:                                        // byteCount tells us how many bytes there are
1131:                                        // in this tile, but we need to read shorts,
1132:                                        // which take half as many array elements, so
1133:                                        // we allocate byteCount / 2 shorts.
1134:                                        tempData = new short[byteCount / 2];
1135:                                        readShorts(byteCount / 2, tempData);
1136:                                    }
1137:
1138:                                    stream.seek(save_offset);
1139:
1140:                                } catch (IOException ioe) {
1141:                                    String message = JaiI18N
1142:                                            .getString("TIFFImage13");
1143:                                    ImagingListenerProxy.errorOccurred(message,
1144:                                            new ImagingException(message, ioe),
1145:                                            this, false);
1146:                                    //			throw new RuntimeException(
1147:                                    //					JaiI18N.getString("TIFFImage13"));
1148:                                }
1149:
1150:                                if (dataType == DataBuffer.TYPE_USHORT) {
1151:
1152:                                    // Expand the palette image into an rgb image with ushort
1153:                                    // data type.
1154:                                    int cmapValue;
1155:                                    int count = 0, lookup, len = colormap.length / 3;
1156:                                    int len2 = len * 2;
1157:                                    for (int i = 0; i < unitsBeforeLookup; i++) {
1158:                                        // Get the index into the colormap
1159:                                        lookup = tempData[i] & 0xffff;
1160:                                        // Get the blue value
1161:                                        cmapValue = colormap[lookup + len2];
1162:                                        sdata[count++] = (short) (cmapValue & 0xffff);
1163:                                        // Get the green value
1164:                                        cmapValue = colormap[lookup + len];
1165:                                        sdata[count++] = (short) (cmapValue & 0xffff);
1166:                                        // Get the red value
1167:                                        cmapValue = colormap[lookup];
1168:                                        sdata[count++] = (short) (cmapValue & 0xffff);
1169:                                    }
1170:
1171:                                } else if (dataType == DataBuffer.TYPE_SHORT) {
1172:
1173:                                    // Expand the palette image into an rgb image with
1174:                                    // short data type.
1175:                                    int cmapValue;
1176:                                    int count = 0, lookup, len = colormap.length / 3;
1177:                                    int len2 = len * 2;
1178:                                    for (int i = 0; i < unitsBeforeLookup; i++) {
1179:                                        // Get the index into the colormap
1180:                                        lookup = tempData[i] & 0xffff;
1181:                                        // Get the blue value
1182:                                        cmapValue = colormap[lookup + len2];
1183:                                        sdata[count++] = (short) cmapValue;
1184:                                        // Get the green value
1185:                                        cmapValue = colormap[lookup + len];
1186:                                        sdata[count++] = (short) cmapValue;
1187:                                        // Get the red value
1188:                                        cmapValue = colormap[lookup];
1189:                                        sdata[count++] = (short) cmapValue;
1190:                                    }
1191:                                }
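                                // Illustrative note: a TIFF ColorMap stores all the red
                                // entries first, then all the green, then all the blue, so
                                // with len = colormap.length / 3 the red value for an index
                                // is colormap[index], green is colormap[index + len] and
                                // blue is colormap[index + 2 * len]. The loops above fetch
                                // blue, green and red and write them in that order,
                                // presumably to match the band order of the sample model
                                // set up earlier in this class.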
1192:
1193:                            } else {
1194:
1195:                                // No lookup is done here; when RGB values are needed,
1196:                                // the associated IndexColorModel can be used to get them.
1197:
1198:                                try {
1199:
1200:                                    if (compression == COMP_PACKBITS) {
1201:
1202:                                        stream.readFully(data, 0, byteCount);
1203:
1204:                                        // Since unitsInThisTile is the number of shorts,
1205:                                        // but we do our decompression in terms of bytes, we
1206:                                        // need to multiply unitsInThisTile by 2 in order to
1207:                                        // figure out how many bytes we'll get after
1208:                                        // decompression.
1209:                                        int bytesInThisTile = unitsInThisTile * 2;
1210:
1211:                                        byte byteArray[] = new byte[bytesInThisTile];
1212:                                        decodePackbits(data, bytesInThisTile,
1213:                                                byteArray);
1214:                                        interpretBytesAsShorts(byteArray,
1215:                                                sdata, unitsInThisTile);
1216:
1217:                                    } else if (compression == COMP_LZW) {
1218:
1219:                                        stream.readFully(data, 0, byteCount);
1220:
1221:                                        // Since unitsInThisTile is the number of shorts,
1222:                                        // but we do our decompression in terms of bytes, we
1223:                                        // need to multiply unitsInThisTile by 2 in order to
1224:                                        // figure out how many bytes we'll get after
1225:                                        // decompression.
1226:                                        byte byteArray[] = new byte[unitsInThisTile * 2];
1227:                                        lzwDecoder.decode(data, byteArray,
1228:                                                newRect.height);
1229:                                        interpretBytesAsShorts(byteArray,
1230:                                                sdata, unitsInThisTile);
1231:
1232:                                    } else if (compression == COMP_DEFLATE) {
1233:
1234:                                        stream.readFully(data, 0, byteCount);
1235:                                        byte byteArray[] = new byte[unitsInThisTile * 2];
1236:                                        inflate(data, byteArray);
1237:                                        interpretBytesAsShorts(byteArray,
1238:                                                sdata, unitsInThisTile);
1239:
1240:                                    } else if (compression == COMP_NONE) {
1241:
1242:                                        readShorts(byteCount / 2, sdata);
1243:                                    }
1244:
1245:                                    stream.seek(save_offset);
1246:
1247:                                } catch (IOException ioe) {
1248:                                    String message = JaiI18N
1249:                                            .getString("TIFFImage13");
1250:                                    ImagingListenerProxy.errorOccurred(message,
1251:                                            new ImagingException(message, ioe),
1252:                                            this, false);
1253:                                    //			throw new RuntimeException(
1254:                                    //					JaiI18N.getString("TIFFImage13"));
1255:                                }
1256:                            }
1257:
1258:                        } else if (sampleSize == 8) {
1259:
1260:                            if (decodePaletteAsShorts) {
1261:
1262:                                byte tempData[] = null;
1263:
1264:                                // At this point the data is 1-banded and will
1265:                                // become 3-banded only after the palette lookup.
1266:                                // Since unitsInThisTile was calculated with 3
1267:                                // bands, we need to divide it by 3.
1268:                                int unitsBeforeLookup = unitsInThisTile / 3;
1269:
1270:                                // Read the data; if compressed, decode it; then reset the pointer.
1271:                                try {
1272:
1273:                                    if (compression == COMP_PACKBITS) {
1274:
1275:                                        stream.readFully(data, 0, byteCount);
1276:                                        tempData = new byte[unitsBeforeLookup];
1277:                                        decodePackbits(data, unitsBeforeLookup,
1278:                                                tempData);
1279:
1280:                                    } else if (compression == COMP_LZW) {
1281:
1282:                                        stream.readFully(data, 0, byteCount);
1283:                                        tempData = new byte[unitsBeforeLookup];
1284:                                        lzwDecoder.decode(data, tempData,
1285:                                                newRect.height);
1286:
1287:                                    } else if (compression == COMP_JPEG_TTN2) {
1288:
1289:                                        stream.readFully(data, 0, byteCount);
1290:                                        Raster tempTile = decodeJPEG(data,
1291:                                                decodeParam, colorConvertJPEG,
1292:                                                tile.getMinX(), tile.getMinY());
1293:                                        int[] tempPixels = new int[unitsBeforeLookup];
1294:                                        tempTile.getPixels(tile.getMinX(), tile
1295:                                                .getMinY(), tile.getWidth(),
1296:                                                tile.getHeight(), tempPixels);
1297:                                        tempData = new byte[unitsBeforeLookup];
1298:                                        for (int i = 0; i < unitsBeforeLookup; i++) {
1299:                                            tempData[i] = (byte) tempPixels[i];
1300:                                        }
1301:
1302:                                    } else if (compression == COMP_DEFLATE) {
1303:
1304:                                        stream.readFully(data, 0, byteCount);
1305:                                        tempData = new byte[unitsBeforeLookup];
1306:                                        inflate(data, tempData);
1307:
1308:                                    } else if (compression == COMP_NONE) {
1309:
1310:                                        tempData = new byte[byteCount];
1311:                                        stream
1312:                                                .readFully(tempData, 0,
1313:                                                        byteCount);
1314:                                    }
1315:
1316:                                    stream.seek(save_offset);
1317:
1318:                                } catch (IOException ioe) {
1319:                                    String message = JaiI18N
1320:                                            .getString("TIFFImage13");
1321:                                    ImagingListenerProxy.errorOccurred(message,
1322:                                            new ImagingException(message, ioe),
1323:                                            this, false);
1324:                                    //		throw new RuntimeException(
1325:                                    //					JaiI18N.getString("TIFFImage13"));
1326:                                }
1327:
1328:                                // Expand the palette image into an rgb image with ushort
1329:                                // data type.
1330:                                int cmapValue;
1331:                                int count = 0, lookup, len = colormap.length / 3;
1332:                                int len2 = len * 2;
1333:                                for (int i = 0; i < unitsBeforeLookup; i++) {
1334:                                    // Get the index into the colormap
1335:                                    lookup = tempData[i] & 0xff;
1336:                                    // Get the blue value
1337:                                    cmapValue = colormap[lookup + len2];
1338:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1339:                                    // Get the green value
1340:                                    cmapValue = colormap[lookup + len];
1341:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1342:                                    // Get the red value
1343:                                    cmapValue = colormap[lookup];
1344:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1345:                                }
1346:                            } else {
1347:
1348:                                // No lookup is done here; when RGB values are needed,
1349:                                // the associated IndexColorModel can be used to get them.
1350:
1351:                                try {
1352:
1353:                                    if (compression == COMP_PACKBITS) {
1354:
1355:                                        stream.readFully(data, 0, byteCount);
1356:                                        decodePackbits(data, unitsInThisTile,
1357:                                                bdata);
1358:
1359:                                    } else if (compression == COMP_LZW) {
1360:
1361:                                        stream.readFully(data, 0, byteCount);
1362:                                        lzwDecoder.decode(data, bdata,
1363:                                                newRect.height);
1364:
1365:                                    } else if (compression == COMP_JPEG_TTN2) {
1366:
1367:                                        stream.readFully(data, 0, byteCount);
1368:                                        tile
1369:                                                .setRect(decodeJPEG(data,
1370:                                                        decodeParam,
1371:                                                        colorConvertJPEG, tile
1372:                                                                .getMinX(),
1373:                                                        tile.getMinY()));
1374:
1375:                                    } else if (compression == COMP_DEFLATE) {
1376:
1377:                                        stream.readFully(data, 0, byteCount);
1378:                                        inflate(data, bdata);
1379:
1380:                                    } else if (compression == COMP_NONE) {
1381:
1382:                                        stream.readFully(bdata, 0, byteCount);
1383:                                    }
1384:
1385:                                    stream.seek(save_offset);
1386:
1387:                                } catch (IOException ioe) {
1388:                                    String message = JaiI18N
1389:                                            .getString("TIFFImage13");
1390:                                    ImagingListenerProxy.errorOccurred(message,
1391:                                            new ImagingException(message, ioe),
1392:                                            this, false);
1393:                                    //		throw new RuntimeException(
1394:                                    //					JaiI18N.getString("TIFFImage13"));
1395:                                }
1396:                            }
1397:
1398:                        } else if (sampleSize == 4) {
1399:
1400:                            int padding = (newRect.width % 2 == 0) ? 0 : 1;
1401:                            int bytesPostDecoding = ((newRect.width / 2 + padding) * newRect.height);
1402:
1403:                            // Output short images
1404:                            if (decodePaletteAsShorts) {
1405:
1406:                                byte tempData[] = null;
1407:
1408:                                try {
1409:                                    stream.readFully(data, 0, byteCount);
1410:                                    stream.seek(save_offset);
1411:                                } catch (IOException ioe) {
1412:                                    String message = JaiI18N
1413:                                            .getString("TIFFImage13");
1414:                                    ImagingListenerProxy.errorOccurred(message,
1415:                                            new ImagingException(message, ioe),
1416:                                            this, false);
1417:                                    //			throw new RuntimeException(
1418:                                    //					JaiI18N.getString("TIFFImage13"));
1419:                                }
1420:
1421:                                // If compressed, decode the data.
1422:                                if (compression == COMP_PACKBITS) {
1423:
1424:                                    tempData = new byte[bytesPostDecoding];
1425:                                    decodePackbits(data, bytesPostDecoding,
1426:                                            tempData);
1427:
1428:                                } else if (compression == COMP_LZW) {
1429:
1430:                                    tempData = new byte[bytesPostDecoding];
1431:                                    lzwDecoder.decode(data, tempData,
1432:                                            newRect.height);
1433:
1434:                                } else if (compression == COMP_DEFLATE) {
1435:
1436:                                    tempData = new byte[bytesPostDecoding];
1437:                                    inflate(data, tempData);
1438:
1439:                                } else if (compression == COMP_NONE) {
1440:
1441:                                    tempData = data;
1442:                                }
1443:
1444:                                int bytes = unitsInThisTile / 3;
1445:
1446:                                // Unpack the 2 pixels packed into each byte.
1447:                                data = new byte[bytes];
1448:
1449:                                int srcCount = 0, dstCount = 0;
1450:                                for (int j = 0; j < newRect.height; j++) {
1451:                                    for (int i = 0; i < newRect.width / 2; i++) {
1452:                                        data[dstCount++] = (byte) ((tempData[srcCount] & 0xf0) >> 4);
1453:                                        data[dstCount++] = (byte) (tempData[srcCount++] & 0x0f);
1454:                                    }
1455:
1456:                                    if (padding == 1) {
1457:                                        data[dstCount++] = (byte) ((tempData[srcCount++] & 0xf0) >> 4);
1458:                                    }
1459:                                }
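                                // Illustrative example: the byte 0xAB unpacks to the two
                                // palette indices 0x0A and 0x0B (high nibble first); for an
                                // odd tile width the final byte of each row contributes only
                                // its high nibble.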
1460:
1461:                                int len = colormap.length / 3;
1462:                                int len2 = len * 2;
1463:                                int cmapValue, lookup;
1464:                                int count = 0;
1465:                                for (int i = 0; i < bytes; i++) {
1466:                                    lookup = data[i] & 0xff;
1467:                                    cmapValue = colormap[lookup + len2];
1468:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1469:                                    cmapValue = colormap[lookup + len];
1470:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1471:                                    cmapValue = colormap[lookup];
1472:                                    sdata[count++] = (short) (cmapValue & 0xffff);
1473:                                }
1474:                            } else {
1475:
1476:                                // Output byte values, use IndexColorModel for unpacking
1477:                                try {
1478:
1479:                                    // If compressed, decode the data.
1480:                                    if (compression == COMP_PACKBITS) {
1481:
1482:                                        stream.readFully(data, 0, byteCount);
1483:                                        decodePackbits(data, bytesPostDecoding,
1484:                                                bdata);
1485:
1486:                                    } else if (compression == COMP_LZW) {
1487:
1488:                                        stream.readFully(data, 0, byteCount);
1489:                                        lzwDecoder.decode(data, bdata,
1490:                                                newRect.height);
1491:
1492:                                    } else if (compression == COMP_DEFLATE) {
1493:
1494:                                        stream.readFully(data, 0, byteCount);
1495:                                        inflate(data, bdata);
1496:
1497:                                    } else if (compression == COMP_NONE) {
1498:
1499:                                        stream.readFully(bdata, 0, byteCount);
1500:                                    }
1501:
1502:                                    stream.seek(save_offset);
1503:
1504:                                } catch (IOException ioe) {
1505:                                    String message = JaiI18N
1506:                                            .getString("TIFFImage13");
1507:                                    ImagingListenerProxy.errorOccurred(message,
1508:                                            new ImagingException(message, ioe),
1509:                                            this, false);
1510:                                    //			throw new RuntimeException(
1511:                                    //					JaiI18N.getString("TIFFImage13"));
1512:                                }
1513:                            }
1514:                        }
1515:                    } else if (imageType == TYPE_GRAY_4BIT) { // 4-bit gray
1516:                        try {
1517:                            if (compression == COMP_PACKBITS) {
1518:
1519:                                stream.readFully(data, 0, byteCount);
1520:
1521:                                // Since the decompressed data will still be packed
1522:                                // 2 pixels into 1 byte, calculate bytesInThisTile
1523:                                int bytesInThisTile;
1524:                                if ((newRect.width % 2) == 0) {
1525:                                    bytesInThisTile = (newRect.width / 2)
1526:                                            * newRect.height;
1527:                                } else {
1528:                                    bytesInThisTile = (newRect.width / 2 + 1)
1529:                                            * newRect.height;
1530:                                }
1531:
1532:                                decodePackbits(data, bytesInThisTile, bdata);
1533:
1534:                            } else if (compression == COMP_LZW) {
1535:
1536:                                stream.readFully(data, 0, byteCount);
1537:                                lzwDecoder.decode(data, bdata, newRect.height);
1538:
1539:                            } else if (compression == COMP_DEFLATE) {
1540:
1541:                                stream.readFully(data, 0, byteCount);
1542:                                inflate(data, bdata);
1543:
1544:                            } else {
1545:
1546:                                stream.readFully(bdata, 0, byteCount);
1547:                            }
1548:
1549:                            stream.seek(save_offset);
1550:                        } catch (IOException ioe) {
1551:                            String message = JaiI18N.getString("TIFFImage13");
1552:                            ImagingListenerProxy.errorOccurred(message,
1553:                                    new ImagingException(message, ioe), this,
1554:                                    false);
1555:                            //		throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1556:                        }
1557:                    } else { // everything else
1558:                        try {
1559:
1560:                            if (sampleSize == 8) {
1561:
1562:                                if (compression == COMP_NONE) {
1563:
1564:                                    stream.readFully(bdata, 0, byteCount);
1565:
1566:                                } else if (compression == COMP_LZW) {
1567:
1568:                                    stream.readFully(data, 0, byteCount);
1569:                                    lzwDecoder.decode(data, bdata,
1570:                                            newRect.height);
1571:
1572:                                } else if (compression == COMP_PACKBITS) {
1573:
1574:                                    stream.readFully(data, 0, byteCount);
1575:                                    decodePackbits(data, unitsInThisTile, bdata);
1576:
1577:                                } else if (compression == COMP_JPEG_TTN2) {
1578:
1579:                                    stream.readFully(data, 0, byteCount);
1580:                                    tile.setRect(decodeJPEG(data, decodeParam,
1581:                                            colorConvertJPEG, tile.getMinX(),
1582:                                            tile.getMinY()));
1583:                                } else if (compression == COMP_DEFLATE) {
1584:
1585:                                    stream.readFully(data, 0, byteCount);
1586:                                    inflate(data, bdata);
1587:                                }
1588:
1589:                            } else if (sampleSize == 16) {
1590:
1591:                                if (compression == COMP_NONE) {
1592:
1593:                                    readShorts(byteCount / 2, sdata);
1594:
1595:                                } else if (compression == COMP_LZW) {
1596:
1597:                                    stream.readFully(data, 0, byteCount);
1598:
1599:                                    // Since unitsInThisTile is the number of shorts,
1600:                                    // but we do our decompression in terms of bytes, we
1601:                                    // need to multiply unitsInThisTile by 2 in order to
1602:                                    // figure out how many bytes we'll get after
1603:                                    // decompression.
1604:                                    byte byteArray[] = new byte[unitsInThisTile * 2];
1605:                                    lzwDecoder.decode(data, byteArray,
1606:                                            newRect.height);
1607:                                    interpretBytesAsShorts(byteArray, sdata,
1608:                                            unitsInThisTile);
1609:
1610:                                } else if (compression == COMP_PACKBITS) {
1611:
1612:                                    stream.readFully(data, 0, byteCount);
1613:
1614:                                    // Since unitsInThisTile is the number of shorts,
1615:                                    // but we do our decompression in terms of bytes, we
1616:                                    // need to multiply unitsInThisTile by 2 in order to
1617:                                    // figure out how many bytes we'll get after
1618:                                    // decompression.
1619:                                    int bytesInThisTile = unitsInThisTile * 2;
1620:
1621:                                    byte byteArray[] = new byte[bytesInThisTile];
1622:                                    decodePackbits(data, bytesInThisTile,
1623:                                            byteArray);
1624:                                    interpretBytesAsShorts(byteArray, sdata,
1625:                                            unitsInThisTile);
1626:                                } else if (compression == COMP_DEFLATE) {
1627:
1628:                                    stream.readFully(data, 0, byteCount);
1629:                                    byte byteArray[] = new byte[unitsInThisTile * 2];
1630:                                    inflate(data, byteArray);
1631:                                    interpretBytesAsShorts(byteArray, sdata,
1632:                                            unitsInThisTile);
1633:
1634:                                }
1635:                            } else if (sampleSize == 32
1636:                                    && dataType == DataBuffer.TYPE_INT) { // redundant
1637:                                if (compression == COMP_NONE) {
1638:
1639:                                    readInts(byteCount / 4, idata);
1640:
1641:                                } else if (compression == COMP_LZW) {
1642:
1643:                                    stream.readFully(data, 0, byteCount);
1644:
1645:                                    // Since unitsInThisTile is the number of ints,
1646:                                    // but we do our decompression in terms of bytes, we
1647:                                    // need to multiply unitsInThisTile by 4 in order to
1648:                                    // figure out how many bytes we'll get after
1649:                                    // decompression.
1650:                                    byte byteArray[] = new byte[unitsInThisTile * 4];
1651:                                    lzwDecoder.decode(data, byteArray,
1652:                                            newRect.height);
1653:                                    interpretBytesAsInts(byteArray, idata,
1654:                                            unitsInThisTile);
1655:
1656:                                } else if (compression == COMP_PACKBITS) {
1657:
1658:                                    stream.readFully(data, 0, byteCount);
1659:
1660:                                    // Since unitsInThisTile is the number of ints,
1661:                                    // but we do our decompression in terms of bytes, we
1662:                                    // need to multiply unitsInThisTile by 4 in order to
1663:                                    // figure out how many bytes we'll get after
1664:                                    // decompression.
1665:                                    int bytesInThisTile = unitsInThisTile * 4;
1666:
1667:                                    byte byteArray[] = new byte[bytesInThisTile];
1668:                                    decodePackbits(data, bytesInThisTile,
1669:                                            byteArray);
1670:                                    interpretBytesAsInts(byteArray, idata,
1671:                                            unitsInThisTile);
1672:                                } else if (compression == COMP_DEFLATE) {
1673:
1674:                                    stream.readFully(data, 0, byteCount);
1675:                                    byte byteArray[] = new byte[unitsInThisTile * 4];
1676:                                    inflate(data, byteArray);
1677:                                    interpretBytesAsInts(byteArray, idata,
1678:                                            unitsInThisTile);
1679:
1680:                                }
1681:                            } else if (sampleSize == 32
1682:                                    && dataType == DataBuffer.TYPE_FLOAT) { // redundant
1683:                                if (compression == COMP_NONE) {
1684:
1685:                                    readFloats(byteCount / 4, fdata);
1686:
1687:                                } else if (compression == COMP_LZW) {
1688:
1689:                                    stream.readFully(data, 0, byteCount);
1690:
1691:                                    // Since unitsInThisTile is the number of floats,
1692:                                    // but we do our decompression in terms of bytes, we
1693:                                    // need to multiply unitsInThisTile by 4 in order to
1694:                                    // figure out how many bytes we'll get after
1695:                                    // decompression.
1696:                                    byte byteArray[] = new byte[unitsInThisTile * 4];
1697:                                    lzwDecoder.decode(data, byteArray,
1698:                                            newRect.height);
1699:                                    interpretBytesAsFloats(byteArray, fdata,
1700:                                            unitsInThisTile);
1701:
1702:                                } else if (compression == COMP_PACKBITS) {
1703:
1704:                                    stream.readFully(data, 0, byteCount);
1705:
1706:                                    // Since unitsInThisTile is the number of floats,
1707:                                    // but we do our decompression in terms of bytes, we
1708:                                    // need to multiply unitsInThisTile by 4 in order to
1709:                                    // figure out how many bytes we'll get after
1710:                                    // decompression.
1711:                                    int bytesInThisTile = unitsInThisTile * 4;
1712:
1713:                                    byte byteArray[] = new byte[bytesInThisTile];
1714:                                    decodePackbits(data, bytesInThisTile,
1715:                                            byteArray);
1716:                                    interpretBytesAsFloats(byteArray, fdata,
1717:                                            unitsInThisTile);
1718:                                } else if (compression == COMP_DEFLATE) {
1719:
1720:                                    stream.readFully(data, 0, byteCount);
1721:                                    byte byteArray[] = new byte[unitsInThisTile * 4];
1722:                                    inflate(data, byteArray);
1723:                                    interpretBytesAsFloats(byteArray, fdata,
1724:                                            unitsInThisTile);
1725:
1726:                                }
1727:                            }
1728:
1729:                            stream.seek(save_offset);
1730:
1731:                        } catch (IOException ioe) {
1732:                            String message = JaiI18N.getString("TIFFImage13");
1733:                            ImagingListenerProxy.errorOccurred(message,
1734:                                    new ImagingException(message, ioe), this,
1735:                                    false);
1736:                            //		throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1737:                        }
1738:
1739:                        // Modify the data for certain special cases.
1740:                        switch (imageType) {
1741:                        case TYPE_GRAY:
1742:                        case TYPE_GRAY_ALPHA:
1743:                            if (isWhiteZero) {
1744:                                // Since we are using a ComponentColorModel with this
1745:                                // image, we need to change the WhiteIsZero data to
1746:                                // BlackIsZero data so it will display properly.
1747:                                if (dataType == DataBuffer.TYPE_BYTE
1748:                                        && !(colorModel instanceof IndexColorModel)) {
1749:
1750:                                    for (int l = 0; l < bdata.length; l += numBands) {
1751:                                        bdata[l] = (byte) (255 - bdata[l]);
1752:                                    }
1753:                                } else if (dataType == DataBuffer.TYPE_USHORT) {
1754:
1755:                                    int ushortMax = Short.MAX_VALUE
1756:                                            - Short.MIN_VALUE;
1757:                                    for (int l = 0; l < sdata.length; l += numBands) {
1758:                                        sdata[l] = (short) (ushortMax - sdata[l]);
1759:                                    }
1760:
1761:                                } else if (dataType == DataBuffer.TYPE_SHORT) {
1762:
1763:                                    for (int l = 0; l < sdata.length; l += numBands) {
1764:                                        sdata[l] = (short) (~sdata[l]);
1765:                                    }
1766:                                } else if (dataType == DataBuffer.TYPE_INT) {
1767:
1768:                                    long uintMax = Integer.MAX_VALUE
1769:                                            - Integer.MIN_VALUE;
1770:                                    for (int l = 0; l < idata.length; l += numBands) {
1771:                                        idata[l] = (int) (uintMax - (long) idata[l]);
1772:                                    }
1773:                                }
1774:                            }
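                            // Illustrative note: each branch above amounts to a bitwise
                            // complement of the stored sample -- e.g. for 16-bit data
                            // (short) (65535 - v) and (short) ~v produce the same bit
                            // pattern. Likewise, although Integer.MAX_VALUE -
                            // Integer.MIN_VALUE overflows int arithmetic to -1 before the
                            // widening to long, (int) (-1L - v) is still the complement ~v,
                            // which is the intended inversion of an unsigned 32-bit sample.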
1775:                            break;
1776:                        case TYPE_YCBCR_SUB:
1777:                            // Post-processing for YCbCr with subsampled chrominance:
1778:                            // simply replicate the chroma channels for displayability.
1779:                            int pixelsPerDataUnit = chromaSubH * chromaSubV;
1780:
1781:                            int numH = newRect.width / chromaSubH;
1782:                            int numV = newRect.height / chromaSubV;
1783:
1784:                            byte[] tempData = new byte[numH * numV
1785:                                    * (pixelsPerDataUnit + 2)];
1786:                            System.arraycopy(bdata, 0, tempData, 0,
1787:                                    tempData.length);
1788:
1789:                            int samplesPerDataUnit = pixelsPerDataUnit * 3;
1790:                            int[] pixels = new int[samplesPerDataUnit];
1791:
1792:                            int bOffset = 0;
1793:                            int offsetCb = pixelsPerDataUnit;
1794:                            int offsetCr = offsetCb + 1;
1795:
1796:                            int y = newRect.y;
1797:                            for (int j = 0; j < numV; j++) {
1798:                                int x = newRect.x;
1799:                                for (int i = 0; i < numH; i++) {
1800:                                    int Cb = tempData[bOffset + offsetCb];
1801:                                    int Cr = tempData[bOffset + offsetCr];
1802:                                    int k = 0;
1803:                                    while (k < samplesPerDataUnit) {
1804:                                        pixels[k++] = tempData[bOffset++];
1805:                                        pixels[k++] = Cb;
1806:                                        pixels[k++] = Cr;
1807:                                    }
1808:                                    bOffset += 2;
1809:                                    tile.setPixels(x, y, chromaSubH,
1810:                                            chromaSubV, pixels);
1811:                                    x += chromaSubH;
1812:                                }
1813:                                y += chromaSubV;
1814:                            }
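                            // Illustrative note: each decoded data unit holds
                            // chromaSubH * chromaSubV luma (Y) samples followed by one Cb
                            // and one Cr, so for 2 x 2 subsampling the 6 stored bytes
                            // expand to 4 pixels x 3 samples = 12 output samples per unit.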
1815:
1816:                            break;
1817:                        }
1818:                    }
1819:
1820:                } // synchronized(this.stream)
1821:
1822:                return tile;
1823:            }
1824:
1825:            private void readShorts(int shortCount, short shortArray[]) {
1826:
1827:                // Since each short consists of 2 bytes, we need a
1828:                // byte array of double size
1829:                int byteCount = 2 * shortCount;
1830:                byte byteArray[] = new byte[byteCount];
1831:
1832:                try {
1833:                    stream.readFully(byteArray, 0, byteCount);
1834:                } catch (IOException ioe) {
1835:                    String message = JaiI18N.getString("TIFFImage13");
1836:                    ImagingListenerProxy.errorOccurred(message,
1837:                            new ImagingException(message, ioe), this, false);
1838:                    //	   throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1839:                }
1840:
1841:                interpretBytesAsShorts(byteArray, shortArray, shortCount);
1842:            }
1843:
1844:            private void readInts(int intCount, int intArray[]) {
1845:
1846:                // Since each int consists of 4 bytes, we need a
1847:                // byte array of quadruple size
1848:                int byteCount = 4 * intCount;
1849:                byte byteArray[] = new byte[byteCount];
1850:
1851:                try {
1852:                    stream.readFully(byteArray, 0, byteCount);
1853:                } catch (IOException ioe) {
1854:                    String message = JaiI18N.getString("TIFFImage13");
1855:                    ImagingListenerProxy.errorOccurred(message,
1856:                            new ImagingException(message, ioe), this, false);
1857:                    //	   throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1858:                }
1859:
1860:                interpretBytesAsInts(byteArray, intArray, intCount);
1861:            }
1862:
1863:            private void readFloats(int floatCount, float floatArray[]) {
1864:
1865:                // Since each float consists of 4 bytes, we need a
1866:                // byte array of quadruple size
1867:                int byteCount = 4 * floatCount;
1868:                byte byteArray[] = new byte[byteCount];
1869:
1870:                try {
1871:                    stream.readFully(byteArray, 0, byteCount);
1872:                } catch (IOException ioe) {
1873:                    String message = JaiI18N.getString("TIFFImage13");
1874:                    ImagingListenerProxy.errorOccurred(message,
1875:                            new ImagingException(message, ioe), this, false);
1876:                    //	   throw new RuntimeException(JaiI18N.getString("TIFFImage13"));
1877:                }
1878:
1879:                interpretBytesAsFloats(byteArray, floatArray, floatCount);
1880:            }
1881:
1882:            // Method to interpret a byte array as a short array, depending on
1883:            // whether the bytes are stored in big endian or little endian format.
1884:            private void interpretBytesAsShorts(byte byteArray[],
1885:                    short shortArray[], int shortCount) {
1886:
1887:                int j = 0;
1888:                int firstByte, secondByte;
1889:
1890:                if (isBigEndian) {
1891:
1892:                    for (int i = 0; i < shortCount; i++) {
1893:                        firstByte = byteArray[j++] & 0xff;
1894:                        secondByte = byteArray[j++] & 0xff;
1895:                        shortArray[i] = (short) ((firstByte << 8) + secondByte);
1896:                    }
1897:
1898:                } else {
1899:
1900:                    for (int i = 0; i < shortCount; i++) {
1901:                        firstByte = byteArray[j++] & 0xff;
1902:                        secondByte = byteArray[j++] & 0xff;
1903:                        shortArray[i] = (short) ((secondByte << 8) + firstByte);
1904:                    }
1905:                }
1906:            }
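            // Illustrative sketch (not part of the original decoder): the same
            // byte-order handling could be expressed with the standard java.nio
            // API, assuming byteArray has already been filled and isBigEndian
            // reflects the byte order of the TIFF stream:
            //
            //     java.nio.ByteBuffer.wrap(byteArray)
            //             .order(isBigEndian ? java.nio.ByteOrder.BIG_ENDIAN
            //                                : java.nio.ByteOrder.LITTLE_ENDIAN)
            //             .asShortBuffer()
            //             .get(shortArray, 0, shortCount);
            //
            // The hand-rolled loop above avoids the intermediate buffer views.
            // For example, the bytes { 0x12, 0x34 } become 0x1234 when big endian
            // and 0x3412 when little endian.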
1907:
1908:            // Method to interpret a byte array as an int array, depending on
1909:            // whether the bytes are stored in big endian or little endian format.
1910:            private void interpretBytesAsInts(byte byteArray[], int intArray[],
1911:                    int intCount) {
1912:
1913:                int j = 0;
1914:
1915:                if (isBigEndian) {
1916:
1917:                    for (int i = 0; i < intCount; i++) {
1918:                        intArray[i] = (int) (((byteArray[j++] & 0xff) << 24)
1919:                                | ((byteArray[j++] & 0xff) << 16)
1920:                                | ((byteArray[j++] & 0xff) << 8) | (byteArray[j++] & 0xff));
1921:                    }
1922:
1923:                } else {
1924:
1925:                    for (int i = 0; i < intCount; i++) {
1926:                        intArray[i] = (int) ((byteArray[j++] & 0xff)
1927:                                | ((byteArray[j++] & 0xff) << 8)
1928:                                | ((byteArray[j++] & 0xff) << 16) | ((byteArray[j++] & 0xff) << 24));
1929:                    }
1930:                }
1931:            }
1932:
1933:            // Method to interpret a byte array to a float array, depending on
1934:            // whether the bytes are stored in a big endian or little endian format.
1935:            private void interpretBytesAsFloats(byte byteArray[],
1936:                    float floatArray[], int floatCount) {
1937:
1938:                int j = 0;
1939:
1940:                if (isBigEndian) {
1941:
1942:                    for (int i = 0; i < floatCount; i++) {
1943:                        int value = (int) (((byteArray[j++] & 0xff) << 24)
1944:                                | ((byteArray[j++] & 0xff) << 16)
1945:                                | ((byteArray[j++] & 0xff) << 8) | (byteArray[j++] & 0xff));
1946:                        floatArray[i] = Float.intBitsToFloat(value);
1947:                    }
1948:
1949:                } else {
1950:
1951:                    for (int i = 0; i < floatCount; i++) {
1952:                        int value = (int) ((byteArray[j++] & 0xff)
1953:                                | ((byteArray[j++] & 0xff) << 8)
1954:                                | ((byteArray[j++] & 0xff) << 16) | ((byteArray[j++] & 0xff) << 24));
1955:                        floatArray[i] = Float.intBitsToFloat(value);
1956:                    }
1957:                }
1958:            }
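            // Illustrative note: the reassembled int is the raw IEEE 754 bit
            // pattern, so Float.intBitsToFloat(0x3F800000) yields 1.0f and
            // Float.intBitsToFloat(0xC0000000) yields -2.0f.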
1959:
1960:            // Decompress PackBits-compressed image data.
1961:            private byte[] decodePackbits(byte data[], int arraySize, byte[] dst) {
1962:
1963:                if (dst == null) {
1964:                    dst = new byte[arraySize];
1965:                }
1966:
1967:                int srcCount = 0, dstCount = 0;
1968:                int srcArraySize = data.length;
1969:                byte repeat, b;
1970:
1971:                try {
1972:
1973:                    while (dstCount < arraySize && srcCount < srcArraySize) {
1974:
1975:                        b = data[srcCount++];
1976:
1977:                        if (b >= 0 && b <= 127) {
1978:
1979:                            // literal run packet
1980:                            for (int i = 0; i < (b + 1); i++) {
1981:                                dst[dstCount++] = data[srcCount++];
1982:                            }
1983:
1984:                        } else if (b <= -1 && b >= -127) {
1985:
1986:                            // 2 byte encoded run packet
1987:                            repeat = data[srcCount++];
1988:                            for (int i = 0; i < (-b + 1); i++) {
1989:                                dst[dstCount++] = repeat;
1990:                            }
1991:
1992:                        } else {
1993:                            // b is -128: a no-op packet per the PackBits spec, so
1994:                            // just skip it (the header byte was consumed above).
1995:                        }
1996:                    }
1997:                } catch (java.lang.ArrayIndexOutOfBoundsException ae) {
1998:                    String message = JaiI18N.getString("TIFFImage14");
1999:                    ImagingListenerProxy.errorOccurred(message,
2000:                            new ImagingException(message, ae), this, false);
2001:                    //	    throw new RuntimeException(JaiI18N.getString("TIFFImage14"));
2002:                }
2003:
2004:                return dst;
2005:            }
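            // Illustrative example of the PackBits packet format handled above:
            // a header byte n in [0, 127] copies the next n + 1 bytes literally,
            // n in [-127, -1] repeats the next byte -n + 1 times, and -128 is a
            // no-op. So the source { (byte) 0xFE, 0x2A, 0x02, 0x10, 0x20, 0x30 }
            // decodes to { 0x2A, 0x2A, 0x2A, 0x10, 0x20, 0x30 }.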
2006:
2007:            // Need a createColorModel().
2008:            // Create a ComponentColorModel for gray+alpha or RGB+alpha images.
2009:            private ComponentColorModel createAlphaComponentColorModel(
2010:                    int dataType, int numBands, boolean isAlphaPremultiplied,
2011:                    int transparency) {
2012:
2013:                ComponentColorModel ccm = null;
2014:                int RGBBits[] = null;
2015:                ColorSpace cs = null;
2016:                switch (numBands) {
2017:                case 2: // gray+alpha
2018:                    cs = ColorSpace.getInstance(ColorSpace.CS_GRAY);
2019:                    break;
2020:                case 4: // RGB+alpha
2021:                    cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
2022:                    break;
2023:                default:
2024:                    throw new IllegalArgumentException();
2025:                }
2026:
2027:                if (dataType == DataBuffer.TYPE_FLOAT) {
2028:                    ccm = new FloatDoubleColorModel(cs, true,
2029:                            isAlphaPremultiplied, transparency, dataType);
2030:                } else { // all other types
2031:                    int componentSize = 0;
2032:                    switch (dataType) {
2033:                    case DataBuffer.TYPE_BYTE:
2034:                        componentSize = 8;
2035:                        break;
2036:                    case DataBuffer.TYPE_USHORT:
2037:                    case DataBuffer.TYPE_SHORT:
2038:                        componentSize = 16;
2039:                        break;
2040:                    case DataBuffer.TYPE_INT:
2041:                        componentSize = 32;
2042:                        break;
2043:                    default:
2044:                        throw new IllegalArgumentException();
2045:                    }
2046:
2047:                    RGBBits = new int[numBands];
2048:                    for (int i = 0; i < numBands; i++) {
2049:                        RGBBits[i] = componentSize;
2050:                    }
2051:
2052:                    ccm = new ComponentColorModel(cs, RGBBits, true,
2053:                            isAlphaPremultiplied, transparency, dataType);
2054:                }
2055:
2056:                return ccm;
2057:            }
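            // Illustrative note: for 16-bit RGBA data this yields a
            // ComponentColorModel over CS_sRGB with component sizes
            // { 16, 16, 16, 16 } and hasAlpha() == true; 2-banded data is treated
            // as grayscale plus alpha over CS_GRAY.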
2058:        }
2059:
2060:        /**
2061:         * Wrapper class for a <code>SeekableStream</code> which does not throw
2062:         * an <code>EOFException</code> from <code>readFully()</code> when the end
2063:         * of stream is encountered.
2064:         */
2065:        // NB This is a hack to fix bug 4823200 "Make TIFF decoder robust to (comp)
2066:        // images with no strip/tile byte counts field" but there does not seem to
2067:        // be any other way to work around this without extensive code changes.
2068:        class NoEOFStream extends SeekableStream {
2069:            private SeekableStream stream;
2070:
2071:            NoEOFStream(SeekableStream ss) {
2072:                if (ss == null) {
2073:                    throw new IllegalArgumentException();
2074:                }
2075:
2076:                this.stream = ss;
2077:            }
2078:
2079:            public int read() throws IOException {
2080:                int b = stream.read();
2081:                return b < 0 ? 0 : b;
2082:            }
2083:
2084:            public int read(byte[] b, int off, int len) throws IOException {
2085:                int count = stream.read(b, off, len);
2086:                return count < 0 ? len : count;
2087:            }
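            // Illustrative note: reporting a full read of len bytes at end of
            // stream means the readFully() implementation inherited from
            // SeekableStream never sees an end-of-stream return and therefore
            // never throws EOFException; any bytes past the real end of the file
            // are simply left as whatever the destination array already held.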
2088:
2089:            public long getFilePointer() throws IOException {
2090:                return stream.getFilePointer();
2091:            }
2092:
2093:            public void seek(long pos) throws IOException {
2094:                stream.seek(pos);
2095:            }
2096:        }