/*
 * Copyright 1995-2006 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Sun designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Sun in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 */

/*-
 * Reads GIF images from an InputStream and reports the
 * image data to an InputStreamImageSource object.
 *
 * The algorithm is copyright of CompuServe.
 */
package sun.awt.image;

import java.util.Vector;
import java.util.Hashtable;
import java.io.InputStream;
import java.io.IOException;
import java.awt.image.*;

/**
 * Gif Image converter
 *
 * @version 1.62 05/05/07
 * @author Arthur van Hoff
 * @author Jim Graham
 */
public class GifImageDecoder extends ImageDecoder {
    private static final boolean verbose = false;

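    // GIF block introducers, extension labels, and packed-field bit masks
    // as defined by the GIF89a specification.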
    private static final int IMAGESEP = 0x2c;
    private static final int EXBLOCK = 0x21;
    private static final int EX_GRAPHICS_CONTROL = 0xf9;
    private static final int EX_COMMENT = 0xfe;
    private static final int EX_APPLICATION = 0xff;
    private static final int TERMINATOR = 0x3b;
    private static final int TRANSPARENCYMASK = 0x01;
    private static final int INTERLACEMASK = 0x40;
    private static final int COLORMAPMASK = 0x80;

    int num_global_colors;
    byte[] global_colormap;
    int trans_pixel = -1;
    IndexColorModel global_model;

    Hashtable props = new Hashtable();

    byte[] saved_image;
    IndexColorModel saved_model;

    int global_width;
    int global_height;
    int global_bgpixel;

    GifFrame curframe;

    public GifImageDecoder(InputStreamImageSource src, InputStream is) {
        super(src, is);
    }

    /**
     * An error has occurred. Throw an exception.
     */
    private static void error(String s1) throws ImageFormatException {
        throw new ImageFormatException(s1);
    }

    /**
     * Read a number of bytes into a buffer.
     * @return number of bytes that were not read due to EOF or error
     */
    private int readBytes(byte buf[], int off, int len) {
        while (len > 0) {
            try {
                int n = input.read(buf, off, len);
                if (n < 0) {
                    break;
                }
                off += n;
                len -= n;
            } catch (IOException e) {
                break;
            }
        }
        return len;
    }

    private static final int ExtractByte(byte buf[], int off) {
        return (buf[off] & 0xFF);
    }

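    // GIF stores 16-bit values with the least significant byte first.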
    private static final int ExtractWord(byte buf[], int off) {
        return (buf[off] & 0xFF) | ((buf[off + 1] & 0xFF) << 8);
    }

    /**
     * produce an image from the stream.
     */
    public void produceImage() throws IOException, ImageFormatException {
        try {
            readHeader();

            int totalframes = 0;
            int frameno = 0;
            int nloops = -1;
            int disposal_method = 0;
            int delay = -1;
            boolean loopsRead = false;
            boolean isAnimation = false;

            while (!aborted) {
                int code;

                switch (code = input.read()) {
                case EXBLOCK:
                    switch (code = input.read()) {
                    case EX_GRAPHICS_CONTROL: {
                        byte buf[] = new byte[6];
                        if (readBytes(buf, 0, 6) != 0) {
                            return; // error("corrupt GIF file");
                        }
                        if ((buf[0] != 4) || (buf[5] != 0)) {
                            return; // error("corrupt GIF file (GCE size)");
                        }
                        // Delay time is stored in hundredths of a second
                        delay = ExtractWord(buf, 2) * 10;
                        if (delay > 0 && !isAnimation) {
                            isAnimation = true;
                            ImageFetcher.startingAnimation();
                        }
                        disposal_method = (buf[1] >> 2) & 7;
                        // Get the index of the transparent color
                        if ((buf[1] & TRANSPARENCYMASK) != 0) {
                            trans_pixel = ExtractByte(buf, 4);
                        } else {
                            trans_pixel = -1;
                        }
                        break;
                    }

                    case EX_COMMENT:
                    case EX_APPLICATION:
                    default:
                        boolean loop_tag = false;
                        String comment = "";
                        while (true) {
                            int n = input.read();
                            if (n <= 0) {
                                break;
                            }
                            byte buf[] = new byte[n];
                            if (readBytes(buf, 0, n) != 0) {
                                return; // error("corrupt GIF file");
                            }
                            if (code == EX_COMMENT) {
                                comment += new String(buf, 0);
                            } else if (code == EX_APPLICATION) {
                                if (loop_tag) {
                                    if (n == 3 && buf[0] == 1) {
                                        if (loopsRead) {
                                            ExtractWord(buf, 1);
                                        } else {
                                            nloops = ExtractWord(buf, 1);
                                            loopsRead = true;
                                        }
                                    } else {
                                        loop_tag = false;
                                    }
                                }
                                if ("NETSCAPE2.0".equals(new String(buf, 0))) {
                                    loop_tag = true;
                                }
                            }
                        }
                        if (code == EX_COMMENT) {
                            props.put("comment", comment);
                        }
                        if (loop_tag && !isAnimation) {
                            isAnimation = true;
                            ImageFetcher.startingAnimation();
                        }
                        break;

                    case -1:
                        return; // error("corrupt GIF file");
                    }
                    break;

                case IMAGESEP:
                    if (!isAnimation) {
                        input.mark(0); // we don't need the mark buffer
                    }
                    try {
                        if (!readImage(totalframes == 0,
                                       disposal_method, delay)) {
                            return;
                        }
                    } catch (Exception e) {
                        if (verbose) {
                            e.printStackTrace();
                        }
                        return;
                    }
                    frameno++;
                    totalframes++;
                    break;

                default:
                case -1:
                    if (verbose) {
                        if (code == -1) {
                            System.err.println("Premature EOF in GIF file,"
                                               + " frame " + frameno);
                        } else {
                            System.err.println("corrupt GIF file (parse) ["
                                               + code + "].");
                        }
                    }
                    if (frameno == 0) {
                        return;
                    }
                    // NOBREAK
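                    // Fall through: frames have already been produced, so
                    // treat the truncated or corrupt tail like a TERMINATOR
                    // and finish the image below.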

                case TERMINATOR:
                    if (nloops == 0 || nloops-- >= 0) {
                        try {
                            if (curframe != null) {
                                curframe.dispose();
                                curframe = null;
                            }
                            input.reset();
                            saved_image = null;
                            saved_model = null;
                            frameno = 0;
                            break;
                        } catch (IOException e) {
                            return; // Unable to reset input buffer
                        }
                    }
                    if (verbose && frameno != 1) {
                        System.out.println("processing GIF terminator,"
                                           + " frames: " + frameno
                                           + " total: " + totalframes);
                    }
                    imageComplete(ImageConsumer.STATICIMAGEDONE, true);
                    return;
                }
            }
        } finally {
            close();
        }
    }

    /**
     * Read Image header
     */
    private void readHeader() throws IOException, ImageFormatException {
        // Create a buffer
        byte buf[] = new byte[13];

        // Read the header
        if (readBytes(buf, 0, 13) != 0) {
            throw new IOException();
        }

        // Check header
        if ((buf[0] != 'G') || (buf[1] != 'I') || (buf[2] != 'F')) {
            error("not a GIF file.");
        }

        // Global width & height
        global_width = ExtractWord(buf, 6);
        global_height = ExtractWord(buf, 8);

        // colormap info
        int ch = ExtractByte(buf, 10);
        if ((ch & COLORMAPMASK) == 0) {
            // no global colormap so make up our own
            // If there is a local colormap, it will override what we
            // have here.  If there is not a local colormap, the rules
            // for GIF89 say that we can use whatever colormap we want.
            // This means that we should probably put in a full 256 colormap
            // at some point.  REMIND!
            num_global_colors = 2;
            global_bgpixel = 0;
            global_colormap = new byte[2 * 3];
            global_colormap[0] = global_colormap[1] = global_colormap[2] = (byte) 0;
            global_colormap[3] = global_colormap[4] = global_colormap[5] = (byte) 255;
        } else {
            num_global_colors = 1 << ((ch & 0x7) + 1);

            global_bgpixel = ExtractByte(buf, 11);

            if (buf[12] != 0) {
                props.put("aspectratio", ""
                          + ((ExtractByte(buf, 12) + 15) / 64.0));
            }

            // Read colors
            global_colormap = new byte[num_global_colors * 3];
            if (readBytes(global_colormap, 0, num_global_colors * 3) != 0) {
                throw new IOException();
            }
        }
        input.mark(Integer.MAX_VALUE); // set this mark in case this is an animated GIF
    }

    /**
     * The ImageConsumer hints flag for a non-interlaced GIF image.
     */
    private static final int normalflags = ImageConsumer.TOPDOWNLEFTRIGHT
        | ImageConsumer.COMPLETESCANLINES
        | ImageConsumer.SINGLEPASS | ImageConsumer.SINGLEFRAME;

    /**
     * The ImageConsumer hints flag for an interlaced GIF image.
     */
    private static final int interlaceflags = ImageConsumer.RANDOMPIXELORDER
        | ImageConsumer.COMPLETESCANLINES
        | ImageConsumer.SINGLEPASS | ImageConsumer.SINGLEFRAME;

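    // Working tables for the LZW decompressor (dictionary prefixes/suffixes
    // and the expanded output of a single code); they are filled in by the
    // native parseImage() routine.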
    private short prefix[] = new short[4096];
    private byte suffix[] = new byte[4096];
    private byte outCode[] = new byte[4097];

    private static native void initIDs();

    static {
        /* ensure that the necessary native libraries are loaded */
        NativeLibLoader.loadLibraries();
        initIDs();
    }

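    /*
     * Performs the LZW decode of one image block in native code.  Decoded
     * scanline data is reported back to Java through sendPixels() below,
     * which is why sendPixels() has no callers on the Java side.
     */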
    private native boolean parseImage(int x, int y, int width, int height,
                                      boolean interlace, int initCodeSize,
                                      byte block[], byte rasline[],
                                      IndexColorModel model);

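    /*
     * Delivers one decoded run of scanline data to the consumers: the run is
     * clipped against the logical screen, transparent pixels are either
     * replaced from the saved frame or skipped, and the line is saved when
     * the frame's disposal method requires it.
     */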
    private int sendPixels(int x, int y, int width, int height,
                           byte rasline[], ColorModel model) {
        int rasbeg, rasend, x2;
        if (y < 0) {
            height += y;
            y = 0;
        }
        if (y + height > global_height) {
            height = global_height - y;
        }
        if (height <= 0) {
            return 1;
        }
        // rasline[0]     == pixel at coordinate (x,y)
        // rasline[width] == pixel at coordinate (x+width, y)
        if (x < 0) {
            rasbeg = -x;
            width += x;     // same as (width -= rasbeg)
            x2 = 0;         // same as (x2 = x + rasbeg)
        } else {
            rasbeg = 0;
            // width -= 0;  // same as (width -= rasbeg)
            x2 = x;         // same as (x2 = x + rasbeg)
        }
        // rasline[rasbeg]         == pixel at coordinate (x2,y)
        // rasline[width]          == pixel at coordinate (x+width, y)
        // rasline[rasbeg + width] == pixel at coordinate (x2+width, y)
        if (x2 + width > global_width) {
            width = global_width - x2;
        }
        if (width <= 0) {
            return 1;
        }
        rasend = rasbeg + width;
        // rasline[rasbeg] == pixel at coordinate (x2,y)
        // rasline[rasend] == pixel at coordinate (x2+width, y)
        int off = y * global_width + x2;
        boolean save = (curframe.disposal_method == GifFrame.DISPOSAL_SAVE);
        if (trans_pixel >= 0 && !curframe.initialframe) {
            if (saved_image != null && model.equals(saved_model)) {
                for (int i = rasbeg; i < rasend; i++, off++) {
                    byte pixel = rasline[i];
                    if ((pixel & 0xff) == trans_pixel) {
                        rasline[i] = saved_image[off];
                    } else if (save) {
                        saved_image[off] = pixel;
                    }
                }
            } else {
                // We have to do this the hard way - only transmit
                // the non-transparent sections of the line...
                // Fix for 6301050: the interlacing is ignored in this case
                // in order to avoid artefacts in case of animated images.
                int runstart = -1;
                int count = 1;
                for (int i = rasbeg; i < rasend; i++, off++) {
                    byte pixel = rasline[i];
                    if ((pixel & 0xff) == trans_pixel) {
                        if (runstart >= 0) {
                            count = setPixels(x + runstart, y, i - runstart, 1,
                                              model, rasline, runstart, 0);
                            if (count == 0) {
                                break;
                            }
                        }
                        runstart = -1;
                    } else {
                        if (runstart < 0) {
                            runstart = i;
                        }
                        if (save) {
                            saved_image[off] = pixel;
                        }
                    }
                }
                if (runstart >= 0) {
                    count = setPixels(x + runstart, y, rasend - runstart, 1,
                                      model, rasline, runstart, 0);
                }
                return count;
            }
        } else if (save) {
            System.arraycopy(rasline, rasbeg, saved_image, off, width);
        }
        int count = setPixels(x2, y, width, height, model, rasline,
                              rasbeg, 0);
        return count;
    }

    /**
     * Read Image data
     */
    private boolean readImage(boolean first, int disposal_method,
                              int delay) throws IOException {
        if (curframe != null && !curframe.dispose()) {
            abort();
            return false;
        }

        long tm = 0;

        if (verbose) {
            tm = System.currentTimeMillis();
        }

        // Allocate the buffer
        byte block[] = new byte[256 + 3];

        // Read the image descriptor
        if (readBytes(block, 0, 10) != 0) {
            throw new IOException();
        }
        int x = ExtractWord(block, 0);
        int y = ExtractWord(block, 2);
        int width = ExtractWord(block, 4);
        int height = ExtractWord(block, 6);

        /*
         * Most GIF images have identical logical screen and frame
         * dimensions.  Photoshop and Mozilla also appear to fall back on
         * the logical screen dimensions (from the global stream header)
         * when the frame dimensions are invalid.
         *
         * We use a similar heuristic and try to recover the frame
         * dimensions from the logical screen dimensions and the frame
         * offset.
         */
        if (width == 0 && global_width != 0) {
            width = global_width - x;
        }
        if (height == 0 && global_height != 0) {
            height = global_height - y;
        }

        boolean interlace = (block[8] & INTERLACEMASK) != 0;

        IndexColorModel model = global_model;

        if ((block[8] & COLORMAPMASK) != 0) {
            // We read one extra byte above, so we must transfer that byte
            // as the first colormap byte and then manually read the "real"
            // code size byte once the local color table has been consumed.
            int num_local_colors = 1 << ((block[8] & 0x7) + 1);

            // Read local colors
            byte[] local_colormap = new byte[num_local_colors * 3];
            local_colormap[0] = block[9];
            if (readBytes(local_colormap, 1, num_local_colors * 3 - 1) != 0) {
                throw new IOException();
            }

            // Now read the "real" code size byte which follows
            // the local color table
            if (readBytes(block, 9, 1) != 0) {
                throw new IOException();
            }
            if (trans_pixel >= num_local_colors) {
                // Fix for 4233748: extend colormap to contain transparent pixel
                num_local_colors = trans_pixel + 1;
                local_colormap = grow_colormap(local_colormap,
                                               num_local_colors);
            }
            model = new IndexColorModel(8, num_local_colors,
                                        local_colormap, 0, false, trans_pixel);
        } else if (model == null
                   || trans_pixel != model.getTransparentPixel()) {
            if (trans_pixel >= num_global_colors) {
                // Fix for 4233748: extend colormap to contain transparent pixel
                num_global_colors = trans_pixel + 1;
                global_colormap = grow_colormap(global_colormap,
                                                num_global_colors);
            }
            model = new IndexColorModel(8, num_global_colors,
                                        global_colormap, 0, false, trans_pixel);
            global_model = model;
        }

        // Notify the consumers
        if (first) {
            if (global_width == 0) {
                global_width = width;
            }
            if (global_height == 0) {
                global_height = height;
            }

            setDimensions(global_width, global_height);
            setProperties(props);
            setColorModel(model);
            headerComplete();
        }

        if (disposal_method == GifFrame.DISPOSAL_SAVE
            && saved_image == null) {
            saved_image = new byte[global_width * global_height];
            /*
             * If the height of the current frame is smaller than the global
             * height, fill the gap with transparent pixels.
             */
            if ((height < global_height) && (model != null)) {
                // Use an int here: a transparent index >= 128 would turn
                // negative if cast to byte and the gap would never be filled.
                int tpix = model.getTransparentPixel();
                if (tpix >= 0) {
                    byte trans_rasline[] = new byte[global_width];
                    for (int i = 0; i < global_width; i++) {
                        trans_rasline[i] = (byte) tpix;
                    }

                    setPixels(0, 0, global_width, y, model,
                              trans_rasline, 0, 0);
                    setPixels(0, y + height, global_width,
                              global_height - height - y, model,
                              trans_rasline, 0, 0);
                }
            }
        }

        int hints = (interlace ? interlaceflags : normalflags);
        setHints(hints);

        curframe = new GifFrame(this, disposal_method, delay,
                                (curframe == null), model, x, y, width, height);

        // allocate the raster data
        byte rasline[] = new byte[width];

        if (verbose) {
            System.out.print("Reading a " + width + " by " + height
                             + " " + (interlace ? "" : "non-")
                             + "interlaced image...");
        }

        boolean ret = parseImage(x, y, width, height, interlace,
                                 ExtractByte(block, 9), block, rasline, model);

        if (!ret) {
            abort();
        }

        if (verbose) {
            System.out.println("done in "
                               + (System.currentTimeMillis() - tm) + "ms");
        }

        return ret;
    }

    public static byte[] grow_colormap(byte[] colormap, int newlen) {
        byte[] newcm = new byte[newlen * 3];
        System.arraycopy(colormap, 0, newcm, 0, colormap.length);
        return newcm;
    }
}

class GifFrame {
    private static final boolean verbose = false;
    private static IndexColorModel trans_model;

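    // Disposal methods from the Graphic Control Extension packed field
    // ((packed >> 2) & 7): what to do with this frame's area before the
    // next frame is rendered.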
    static final int DISPOSAL_NONE = 0x00;
    static final int DISPOSAL_SAVE = 0x01;
    static final int DISPOSAL_BGCOLOR = 0x02;
    static final int DISPOSAL_PREVIOUS = 0x03;

    GifImageDecoder decoder;

    int disposal_method;
    int delay;

    IndexColorModel model;

    int x;
    int y;
    int width;
    int height;

    boolean initialframe;

    public GifFrame(GifImageDecoder id, int dm, int dl, boolean init,
                    IndexColorModel cm, int x, int y, int w, int h) {
        this.decoder = id;
        this.disposal_method = dm;
        this.delay = dl;
        this.model = cm;
        this.initialframe = init;
        this.x = x;
        this.y = y;
        this.width = w;
        this.height = h;
    }

    private void setPixels(int x, int y, int w, int h, ColorModel cm,
                           byte[] pix, int off, int scan) {
        decoder.setPixels(x, y, w, h, cm, pix, off, scan);
    }

    public boolean dispose() {
        if (decoder.imageComplete(ImageConsumer.SINGLEFRAMEDONE, false) == 0) {
            return false;
        } else {
            if (delay > 0) {
                try {
                    if (verbose) {
                        System.out.println("sleeping: " + delay);
                    }
                    Thread.sleep(delay);
                } catch (InterruptedException e) {
                    return false;
                }
            } else {
                Thread.yield();
            }

            if (verbose && disposal_method != 0) {
                System.out.println("disposal method: " + disposal_method);
            }

            int global_width = decoder.global_width;
            int global_height = decoder.global_height;

            if (x < 0) {
                width += x;
                x = 0;
            }
            if (x + width > global_width) {
                width = global_width - x;
            }
            if (width <= 0) {
                disposal_method = DISPOSAL_NONE;
            } else {
                if (y < 0) {
                    height += y;
                    y = 0;
                }
                if (y + height > global_height) {
                    height = global_height - y;
                }
                if (height <= 0) {
                    disposal_method = DISPOSAL_NONE;
                }
            }

            switch (disposal_method) {
            case DISPOSAL_PREVIOUS:
                byte[] saved_image = decoder.saved_image;
                IndexColorModel saved_model = decoder.saved_model;
                if (saved_image != null) {
                    setPixels(x, y, width, height, saved_model,
                              saved_image, y * global_width + x,
                              global_width);
                }
                break;
            case DISPOSAL_BGCOLOR:
                byte tpix;
                if (model.getTransparentPixel() < 0) {
                    model = trans_model;
                    if (model == null) {
                        model = new IndexColorModel(8, 1, new byte[4],
                                                    0, true);
                        trans_model = model;
                    }
                    tpix = 0;
                } else {
                    tpix = (byte) model.getTransparentPixel();
                }
                byte[] rasline = new byte[width];
                if (tpix != 0) {
                    for (int i = 0; i < width; i++) {
                        rasline[i] = tpix;
                    }
                }

                // clear saved_image using transparent pixels
                // this will be used as the background in the next display
                if (decoder.saved_image != null) {
                    for (int i = 0; i < global_width * global_height; i++) {
                        decoder.saved_image[i] = tpix;
                    }
                }

                setPixels(x, y, width, height, model, rasline, 0, 0);
                break;
            case DISPOSAL_SAVE:
                decoder.saved_model = model;
                break;
            }
        }
        return true;
    }
}