package org.libtiff.jai.codecimpl;

/*
 * XTIFF: eXtensible TIFF libraries for JAI.
 *
 * The contents of this file are subject to the JAVA ADVANCED IMAGING SAMPLE
 * INPUT-OUTPUT CODECS AND WIDGET HANDLING SOURCE CODE License Version 1.0 (the
 * "License"); You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.sun.com/software/imaging/JAI/index.html
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
 * the specific language governing rights and limitations under the License.
 *
 * The Original Code is JAVA ADVANCED IMAGING SAMPLE INPUT-OUTPUT CODECS AND
 * WIDGET HANDLING SOURCE CODE. The Initial Developer of the Original Code is:
 * Sun Microsystems, Inc.. Portions created by: Niles Ritter are Copyright (C):
 * Niles Ritter, GeoTIFF.org, 1999,2000. All Rights Reserved. Contributor(s):
 * Niles Ritter
 */

import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.IndexColorModel;
import java.awt.image.MultiPixelPackedSampleModel;
import java.awt.image.PixelInterleavedSampleModel;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.io.IOException;

import javax.media.jai.RasterFactory;

import org.libtiff.jai.codec.XTIFF;
import org.libtiff.jai.codec.XTIFFDecodeParam;
import org.libtiff.jai.codec.XTIFFDirectory;
import org.libtiff.jai.codec.XTIFFField;
import org.libtiff.jai.codec.XTIFFTileCodec;
import org.libtiff.jai.util.JaiI18N;

import com.sun.media.jai.codec.ImageCodec;
import com.sun.media.jai.codec.SeekableStream;
import com.sun.media.jai.codec.TIFFDecodeParam;
import com.sun.media.jai.codecimpl.SimpleRenderedImage;

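/**
 * A {@link SimpleRenderedImage} that reads a single IFD (image) of a TIFF
 * stream and decodes its pixel data through the XTIFF tile-codec framework.
 * Both tiled and stripped TIFF files are exposed through the tile interface;
 * strips are treated as full-width tiles.
 *
 * <p>A minimal usage sketch (the stream construction shown here is
 * illustrative only, not something this class requires):
 *
 * <pre>
 *   SeekableStream stream = new FileSeekableStream("image.tif");
 *   XTIFFImage image = new XTIFFImage(stream, null, 0); // first IFD
 *   Raster tile = image.getTile(0, 0);
 * </pre>
 */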
public class XTIFFImage extends SimpleRenderedImage {

    XTIFFTileCodec codec;
    XTIFFDirectory dir;
    TIFFDecodeParam param;
    int photometric_interp;
    SeekableStream stream;
    int tileSize;
    int tilesX, tilesY;
    long[] tileOffsets;
    long[] tileByteCounts;
    char[] colormap;
    char[] bitsPerSample;
    int samplesPerPixel;
    int extraSamples;
    byte[] palette;
    int bands;
    char[] sampleFormat;

    boolean decodePaletteAsShorts;

    boolean isBigEndian;

    // Image types
    int image_type;
    int dataType;

    /**
     * Constructs an XTIFFImage that acquires its data from a given
     * SeekableStream and reads from a particular IFD of the stream. The index
     * of the first IFD is 0.
     *
     * @param stream
     *            the SeekableStream to read from.
     * @param param
     *            an instance of TIFFDecodeParam, or null.
     * @param directory
     *            the index of the IFD to read from.
     */
    public XTIFFImage(SeekableStream stream, TIFFDecodeParam param,
            int directory) throws IOException {

        this.stream = stream;
        if (param == null || !(param instanceof XTIFFDecodeParam)) {
            param = new XTIFFDecodeParam(param);
        }
        this.param = param;

        decodePaletteAsShorts = param.getDecodePaletteAsShorts();

        // Read the specified directory.
        dir = XTIFFDirectory.create(stream, directory);
        properties.put("tiff.directory", dir);
        ((XTIFFDecodeParam) param).setDirectory(dir);

        // Check whether big endian or little endian format is used.
        isBigEndian = dir.isBigEndian();

        setupImageParameters();

        setupSamplesAndColor();

        dir.setImageType(image_type);

        // Calculate the number of tiles and the per-tile size in samples
        // (tileWidth * tileHeight * bands).
        tilesX = (width + tileWidth - 1) / tileWidth;
        tilesY = (height + tileHeight - 1) / tileHeight;
        tileSize = tileWidth * tileHeight * bands;

        try {
            codec = dir.createTileCodec((XTIFFDecodeParam) param);
        } catch (Exception e) {
            // Fail fast rather than leaving the codec null, which would only
            // surface later as a NullPointerException in getTile().
            throw new RuntimeException("Failed to create tile codec: " + e);
        }
    }

    /**
     * Reads the basic image layout and data-organization parameters from the
     * directory fields.
     */
    protected void setupImageParameters() {

        // Set basic image layout
        minX = minY = 0;
        width = (int) dir.getFieldAsLong(XTIFF.TIFFTAG_IMAGE_WIDTH);
        height = (int) dir.getFieldAsLong(XTIFF.TIFFTAG_IMAGE_LENGTH);

        photometric_interp = (int) dir
                .getFieldAsLong(XTIFF.TIFFTAG_PHOTOMETRIC_INTERPRETATION);

        // Read the TIFFTAG_BITS_PER_SAMPLE field
        XTIFFField bitsField = dir.getField(XTIFF.TIFFTAG_BITS_PER_SAMPLE);

        if (bitsField == null) {
            // Default
            bitsPerSample = new char[1];
            bitsPerSample[0] = 1;
        } else {
            bitsPerSample = bitsField.getAsChars();
        }

        for (int i = 1; i < bitsPerSample.length; i++) {
            if (bitsPerSample[i] != bitsPerSample[0]) {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder19"));
            }
        }

        // Get the number of samples per pixel
        XTIFFField sfield = dir.getField(XTIFF.TIFFTAG_SAMPLES_PER_PIXEL);
        if (sfield == null) {
            samplesPerPixel = 1;
        } else {
            samplesPerPixel = (int) sfield.getAsLong(0);
        }

        // Figure out if any extra samples are present.
        XTIFFField efield = dir.getField(XTIFF.TIFFTAG_EXTRA_SAMPLES);
        if (efield == null) {
            extraSamples = 0;
        } else {
            extraSamples = (int) efield.getAsLong(0);
        }

        // Read the TIFFTAG_SAMPLE_FORMAT tag to see whether the data might be
        // signed or floating point
        XTIFFField sampleFormatField = dir
                .getField(XTIFF.TIFFTAG_SAMPLE_FORMAT);

        if (sampleFormatField != null) {
            sampleFormat = sampleFormatField.getAsChars();

            // Check that all the samples have the same format
            for (int l = 1; l < sampleFormat.length; l++) {
                if (sampleFormat[l] != sampleFormat[0]) {
                    throw new RuntimeException(JaiI18N
                            .getString("XTIFFImageDecoder20"));
                }
            }

        } else {
            sampleFormat = new char[] { 1 };
        }

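        // Map (SampleFormat, BitsPerSample) to a DataBuffer type:
        //   unsigned/unknown (1, 4): 8 -> TYPE_BYTE, 16 -> TYPE_USHORT, 32 -> TYPE_INT
        //                            (1- and 4-bit data keep the TYPE_BYTE default)
        //   signed (2):              16 -> TYPE_SHORT, 32 -> TYPE_INT (1/4/8 rejected)
        //   floating point (3):      rejected by this decoder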
        if (sampleFormat[0] == 1 || sampleFormat[0] == 4) {

            // Unsigned or unknown
            if (bitsPerSample[0] == 8) {
                dataType = DataBuffer.TYPE_BYTE;
            } else if (bitsPerSample[0] == 16) {
                dataType = DataBuffer.TYPE_USHORT;
            } else if (bitsPerSample[0] == 32) {
                dataType = DataBuffer.TYPE_INT;
            }

        } else if (sampleFormat[0] == 2) {
            // Signed

            if (bitsPerSample[0] == 1 || bitsPerSample[0] == 4
                    || bitsPerSample[0] == 8) {

                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder21"));

            } else if (bitsPerSample[0] == 16) {
                dataType = DataBuffer.TYPE_SHORT;
            } else if (bitsPerSample[0] == 32) {
                dataType = DataBuffer.TYPE_INT;
            }

        } else if (sampleFormat[0] == 3) {
            // Floating point
            // dataType = DataBuffer.TYPE_FLOAT;
            throw new RuntimeException(JaiI18N.getString("XTIFFImageDecoder22"));
        }

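        // Pixel data is organized either as tiles (TileWidth/TileLength tags)
        // or as strips (RowsPerStrip tag). Strips are normalized onto the same
        // tile bookkeeping below: tileWidth becomes the image width and
        // tileHeight the rows-per-strip value.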
        if (dir.getField(XTIFF.TIFFTAG_TILE_WIDTH) != null) {
            // Image is in tiled format
            tileWidth = (int) dir.getFieldAsLong(XTIFF.TIFFTAG_TILE_WIDTH);
            tileHeight = (int) dir.getFieldAsLong(XTIFF.TIFFTAG_TILE_LENGTH);
            tileOffsets = dir.getField(XTIFF.TIFFTAG_TILE_OFFSETS).getAsLongs();
            tileByteCounts = dir.getField(XTIFF.TIFFTAG_TILE_BYTE_COUNTS)
                    .getAsLongs();

        } else {

            // Image is in stripped format, looks like tiles to us
            tileWidth = width;
            XTIFFField field = dir.getField(XTIFF.TIFFTAG_ROWS_PER_STRIP);
            if (field == null) {
                // Default is infinity (2^32 - 1), basically the entire image
                // TODO: Can do a better job of tiling here
                tileHeight = height;
            } else {
                long l = field.getAsLong(0);
                long infinity = 1;
                infinity = (infinity << 32) - 1;
                if (l == infinity) {
                    // 2^32 - 1 (effectively infinity, entire image is 1 strip)
                    tileHeight = height;
                } else {
                    tileHeight = (int) l;
                }
            }

            XTIFFField tileOffsetsField = dir
                    .getField(XTIFF.TIFFTAG_STRIP_OFFSETS);
            if (tileOffsetsField == null) {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder11"));
            } else {
                tileOffsets = tileOffsetsField.getAsLongs();
            }

            XTIFFField tileByteCountsField = dir
                    .getField(XTIFF.TIFFTAG_STRIP_BYTE_COUNTS);
            if (tileByteCountsField == null) {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder12"));
            } else {
                tileByteCounts = tileByteCountsField.getAsLongs();
            }
        }
    }

    /**
     * Constructs the sampleModel and colorModel, and determines the image_type
     * and the bands parameter.
     */
    protected void setupSamplesAndColor() {

        bands = samplesPerPixel;

        // Figure out which kind of image we are dealing with.
        switch (photometric_interp) {

        case XTIFF.PHOTOMETRIC_WHITE_IS_ZERO:

            // bands = 1;

            // Bilevel or Grayscale - WhiteIsZero
            if (bitsPerSample[0] == 1) {

                image_type = XTIFF.TYPE_BILEVEL_WHITE_IS_ZERO;

                // Keep pixels packed, use IndexColorModel
                sampleModel = new MultiPixelPackedSampleModel(
                        DataBuffer.TYPE_BYTE, tileWidth, tileHeight, 1);

                // Set up the palette
                byte[] r = new byte[] { (byte) 255, (byte) 0 };
                byte[] g = new byte[] { (byte) 255, (byte) 0 };
                byte[] b = new byte[] { (byte) 255, (byte) 0 };

                colorModel = new IndexColorModel(1, 2, r, g, b);

            } else {

                image_type = XTIFF.TYPE_GREYSCALE_WHITE_IS_ZERO;

                if (bitsPerSample[0] == 4) {
                    sampleModel = new MultiPixelPackedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, 4);

                    colorModel = ImageCodec.createGrayIndexColorModel(
                            sampleModel, false);

                } else if (bitsPerSample[0] == 8) {
                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, bands);

                    colorModel = ImageCodec.createGrayIndexColorModel(
                            sampleModel, false);

                } else if (bitsPerSample[0] == 16) {

                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            dataType, tileWidth, tileHeight, bands);

                    colorModel = ImageCodec.createComponentColorModel(sampleModel);

                } else {
                    throw new IllegalArgumentException(JaiI18N
                            .getString("XTIFFImageDecoder14"));
                }
            }

            break;

        case XTIFF.PHOTOMETRIC_BLACK_IS_ZERO:

            // bands = 1;

            // Bilevel or Grayscale - BlackIsZero
            if (bitsPerSample[0] == 1) {

                image_type = XTIFF.TYPE_BILEVEL_BLACK_IS_ZERO;

                // Keep pixels packed, use IndexColorModel
                sampleModel = new MultiPixelPackedSampleModel(
                        DataBuffer.TYPE_BYTE, tileWidth, tileHeight, 1);

                // Set up the palette
                byte[] r = new byte[] { (byte) 0, (byte) 255 };
                byte[] g = new byte[] { (byte) 0, (byte) 255 };
                byte[] b = new byte[] { (byte) 0, (byte) 255 };

                // 1-bit pixels packed into a byte, use IndexColorModel
                colorModel = new IndexColorModel(1, 2, r, g, b);

            } else {

                image_type = XTIFF.TYPE_GREYSCALE_BLACK_IS_ZERO;

                if (bitsPerSample[0] == 4) {
                    sampleModel = new MultiPixelPackedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, 4);
                    colorModel = ImageCodec.createGrayIndexColorModel(
                            sampleModel, true);
                } else if (bitsPerSample[0] == 8) {
                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, bands);
                    colorModel = ImageCodec.createComponentColorModel(sampleModel);

                } else if (bitsPerSample[0] == 16) {

                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            dataType, tileWidth, tileHeight, bands);
                    colorModel = ImageCodec.createComponentColorModel(sampleModel);

                } else {
                    throw new IllegalArgumentException(JaiI18N
                            .getString("XTIFFImageDecoder14"));
                }
            }

            break;

        case XTIFF.PHOTOMETRIC_RGB:

            // bands = samplesPerPixel;

            // RGB full color image
            if (bitsPerSample[0] == 8) {

                sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                        DataBuffer.TYPE_BYTE, tileWidth, tileHeight, bands);
            } else if (bitsPerSample[0] == 16) {

                sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                        dataType, tileWidth, tileHeight, bands);
            } else {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder15"));
            }

            if (samplesPerPixel < 3) {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder1"));

            } else if (samplesPerPixel == 3) {

                image_type = XTIFF.TYPE_RGB;
                // No alpha
                colorModel = ImageCodec.createComponentColorModel(sampleModel);

            } else if (samplesPerPixel == 4) {

                if (extraSamples == 0) {

                    image_type = XTIFF.TYPE_ORGB;
                    // Transparency.OPAQUE signifies image data that is
                    // completely opaque, meaning that all pixels have an alpha
                    // value of 1.0. So the extra band gets ignored, which is
                    // what we want.
                    colorModel = createAlphaComponentColorModel(dataType, true,
                            false, Transparency.OPAQUE);

                } else if (extraSamples == 1) {

                    image_type = XTIFF.TYPE_ARGB_PRE;
                    // Premultiplied alpha.
                    colorModel = createAlphaComponentColorModel(dataType, true,
                            true, Transparency.TRANSLUCENT);

                } else if (extraSamples == 2) {

                    image_type = XTIFF.TYPE_ARGB;
                    // The extra sample here is unassociated alpha, usually a
                    // transparency mask, also called a soft matte.
                    colorModel = createAlphaComponentColorModel(dataType, true,
                            false, Transparency.BITMASK);
                }

            } else {
                image_type = XTIFF.TYPE_RGB_EXTRA;

                // For this case we can't display the image, so there is no
                // point in trying to reformat the data to be BGR followed by
                // the ExtraSamples, the way Java2D would like it, because
                // Java2D can't display it anyway. Therefore create a sample
                // model with increasing bandOffsets, and keep the colorModel
                // as null, as there is no appropriate ColorModel.

                int[] bandOffsets = new int[bands];
                for (int i = 0; i < bands; i++) {
                    bandOffsets[i] = i;
                }

                if (bitsPerSample[0] == 8) {

                    sampleModel = new PixelInterleavedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, bands,
                            bands * tileWidth, bandOffsets);
                    colorModel = null;

                } else if (bitsPerSample[0] == 16) {

                    sampleModel = new PixelInterleavedSampleModel(dataType,
                            tileWidth, tileHeight, bands, bands * tileWidth,
                            bandOffsets);
                    colorModel = null;
                }
            }

            break;

        case XTIFF.PHOTOMETRIC_PALETTE:

            image_type = XTIFF.TYPE_PALETTE;

            // Get the colormap
            XTIFFField cfield = dir.getField(XTIFF.TIFFTAG_COLORMAP);
            if (cfield == null) {
                throw new RuntimeException(JaiI18N
                        .getString("XTIFFImageDecoder2"));
            } else {
                colormap = cfield.getAsChars();
            }
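            // A TIFF ColorMap stores 3 * 2^BitsPerSample 16-bit values: all
            // red entries first, then all green, then all blue. The palette
            // handling below relies on that layout (bandLength, gIndex, bIndex).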

            // Could be either 1 or 3 bands depending on whether we use
            // IndexColorModel or not.
            if (decodePaletteAsShorts) {
                // bands = 3;

                if (bitsPerSample[0] != 4 && bitsPerSample[0] != 8
                        && bitsPerSample[0] != 16) {
                    throw new RuntimeException(JaiI18N
                            .getString("XTIFFImageDecoder13"));
                }

                // If no SampleFormat tag was specified and if the
                // bitsPerSample are less than or equal to 8, then the
                // dataType was initially set to byte, but now we want to
                // expand the palette as shorts, so the dataType should
                // be ushort.
                if (dataType == DataBuffer.TYPE_BYTE) {
                    dataType = DataBuffer.TYPE_USHORT;
                }

                // Data will have to be unpacked into a 3-band short image
                // as we do not have an IndexColorModel that can deal with
                // a color model whose entries are of short data type.
                sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                        dataType, tileWidth, tileHeight, bands);
                colorModel = ImageCodec.createComponentColorModel(sampleModel);

            } else {

                // bands = 1;

                if (bitsPerSample[0] == 4) {
                    // Pixel data will not be unpacked; use MPPSM to store
                    // packed data and IndexColorModel to do the unpacking.
                    sampleModel = new MultiPixelPackedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight,
                            bitsPerSample[0]);
                } else if (bitsPerSample[0] == 8) {
                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            DataBuffer.TYPE_BYTE, tileWidth, tileHeight, bands);
                } else if (bitsPerSample[0] == 16) {

                    // Here the data type has to be unsigned since we are
                    // storing indices into the IndexColorModel palette. Of
                    // course the actual palette entries are allowed to be
                    // negative.
                    sampleModel = RasterFactory.createPixelInterleavedSampleModel(
                            DataBuffer.TYPE_USHORT, tileWidth, tileHeight, bands);
                } else {
                    throw new RuntimeException(JaiI18N
                            .getString("XTIFFImageDecoder13"));
                }

                int bandLength = colormap.length / 3;
                byte[] r = new byte[bandLength];
                byte[] g = new byte[bandLength];
                byte[] b = new byte[bandLength];

                int gIndex = bandLength;
                int bIndex = bandLength * 2;
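
                // The IndexColorModel built below takes 8-bit byte components,
                // so the 16-bit colormap entries are scaled down to 8 bits here
                // (signed entries via decodeSigned16BitsTo8Bits, unsigned ones
                // via decode16BitsTo8Bits).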
                if (dataType == DataBuffer.TYPE_SHORT) {

                    for (int i = 0; i < bandLength; i++) {
                        r[i] = param.decodeSigned16BitsTo8Bits(
                                (short) colormap[i]);
                        g[i] = param.decodeSigned16BitsTo8Bits(
                                (short) colormap[gIndex + i]);
                        b[i] = param.decodeSigned16BitsTo8Bits(
                                (short) colormap[bIndex + i]);
                    }

                } else {

                    for (int i = 0; i < bandLength; i++) {
                        r[i] = param.decode16BitsTo8Bits(colormap[i] & 0xffff);
                        g[i] = param.decode16BitsTo8Bits(
                                colormap[gIndex + i] & 0xffff);
                        b[i] = param.decode16BitsTo8Bits(
                                colormap[bIndex + i] & 0xffff);
                    }

                }

                colorModel = new IndexColorModel(bitsPerSample[0], bandLength,
                        r, g, b);
            }

            break;

        case XTIFF.PHOTOMETRIC_TRANSPARENCY:

            image_type = XTIFF.TYPE_TRANS;

            // Transparency Mask
            throw new RuntimeException(JaiI18N.getString("XTIFFImageDecoder3"));
            // break;

        default:
            throw new RuntimeException(JaiI18N.getString("XTIFFImageDecoder4"));
        }

    }


    /**
     * Reads a private IFD from a given offset in the stream. This method may
     * be used to obtain IFDs that are referenced only by private tag values.
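     *
     * <p>A minimal usage sketch; the tag holding the private IFD offset is
     * purely illustrative and depends on the private format being read:
     *
     * <pre>
     *   long offset = directory.getFieldAsLong(somePrivateTag); // hypothetical tag
     *   XTIFFDirectory privateIFD = image.getPrivateIFD(offset);
     * </pre>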
     */
    public XTIFFDirectory getPrivateIFD(long offset) throws IOException {
        return XTIFFDirectory.create(stream, offset);
    }

    private WritableRaster tile00 = null;

    /**
     * Returns tile (tileX, tileY) as a Raster.
     */
    public synchronized Raster getTile(int tileX, int tileY) {
        if (tileX == 0 && tileY == 0 && tile00 != null) {
            return tile00;
        }

        if ((tileX < 0) || (tileX >= tilesX) || (tileY < 0)
                || (tileY >= tilesY)) {
            throw new IllegalArgumentException(JaiI18N
                    .getString("XTIFFImageDecoder5"));
        }

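        // Decode flow: remember the current stream position, seek to this
        // tile's (or strip's) raw data, read its compressed bytes, hand them
        // to the tile codec for decompression into a Raster, and finally
        // restore the stream position.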
        // Save original file pointer position and seek to tile data location.
        long save_offset = 0;
        try {
            save_offset = stream.getFilePointer();
            stream.seek(tileOffsets[tileY * tilesX + tileX]);
        } catch (IOException ioe) {
            throw new RuntimeException(JaiI18N.getString("XTIFFImageDecoder8"));
        }

        // Number of bytes in this tile (strip) after compression.
        int byteCount = (int) tileByteCounts[tileY * tilesX + tileX];

        // Clip the nominal tile rectangle against the image bounds.
        Rectangle tileRect = new Rectangle(tileXToX(tileX), tileYToY(tileY),
                tileWidth, tileHeight);
        Rectangle newRect = tileRect.intersection(getBounds());

        byte[] data = new byte[byteCount];
        WritableRaster tile = null;
        try {
            stream.readFully(data, 0, byteCount);
            tile = codec.decode(this, newRect, data);
            stream.seek(save_offset);
        } catch (IOException e) {
            throw new RuntimeException("Failed to read raw tile data: " + e);
        }

        if (tileX == 0 && tileY == 0) {
            tile00 = tile;
        }
        return tile;
    }

    // Create a ComponentColorModel for TYPE_RGB images with an alpha band.
    private ComponentColorModel createAlphaComponentColorModel(int dataType,
            boolean hasAlpha, boolean isAlphaPremultiplied, int transparency) {

        ComponentColorModel ccm = null;
        int[][] RGBBits = new int[4][];

        // Indexed by the DataBuffer.TYPE_* constant
        // (BYTE = 0, USHORT = 1, SHORT = 2, INT = 3).
        RGBBits[DataBuffer.TYPE_BYTE] = new int[] { 8, 8, 8, 8 };
        RGBBits[DataBuffer.TYPE_USHORT] = new int[] { 16, 16, 16, 16 };
        RGBBits[DataBuffer.TYPE_SHORT] = new int[] { 16, 16, 16, 16 };
        RGBBits[DataBuffer.TYPE_INT] = new int[] { 32, 32, 32, 32 };

        ccm = new ComponentColorModel(
                ColorSpace.getInstance(ColorSpace.CS_sRGB), RGBBits[dataType],
                hasAlpha, isAlphaPremultiplied, transparency, dataType);
        return ccm;
    }
}