/*
 *
 * Copyright (c) 2000 Silvere Martin-Michiellot All Rights Reserved.
 *
 * Silvere Martin-Michiellot grants you ("Licensee") a non-exclusive,
 * royalty free, license to use, modify and redistribute this
 * software in source and binary code form,
 * provided that i) this copyright notice and license appear on all copies of
 * the software; and ii) Licensee does not utilize the software in a manner
 * which is disparaging to Silvere Martin-Michiellot.
 *
 * This software is provided "AS IS," without a warranty of any kind. ALL
 * EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
 * IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
 * NON-INFRINGEMENT, ARE HEREBY EXCLUDED. Silvere Martin-Michiellot
 * AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
 * SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
 * OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL
 * Silvere Martin-Michiellot OR ITS LICENSORS BE LIABLE
 * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
 * INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
 * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
 * OR INABILITY TO USE SOFTWARE, EVEN IF Silvere Martin-Michiellot HAS BEEN
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
 *
 * This software is not designed or intended for use in on-line control of
 * aircraft, air traffic, aircraft navigation or aircraft communications; or in
 * the design, construction, operation or maintenance of any nuclear
 * facility. Licensee represents and warrants that it will not use or
 * redistribute the Software for such purposes.
 *
 *
 */

package com.db.media.in;

import javax.media.*;
import javax.media.renderer.VideoRenderer;
import javax.media.control.*;
import javax.media.format.VideoFormat;
import javax.media.format.RGBFormat;

import java.awt.*;
import java.awt.image.*;
import java.awt.event.*;

import java.util.Vector;

import javax.media.j3d.*;
import javax.vecmath.*;

/*
 * This is a JMF renderer plugin. It texture-maps each incoming
 * video frame onto a 3D surface, making use of Java 3D.
 *
 */

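/*
 * Typical usage -- a minimal sketch only; the SimpleUniverse setup and the
 * "file:movie.mpg" URL below are illustrative assumptions, not part of this
 * class:
 *
 *   Canvas3D canvas = new Canvas3D(SimpleUniverse.getPreferredConfiguration());
 *   Appearance appearance = new Appearance();
 *   // The renderer creates its own Processor for the URL, registers itself
 *   // as the video track's renderer and streams frames into "appearance".
 *   J3DVideoRenderer renderer =
 *           new J3DVideoRenderer("file:movie.mpg", canvas, appearance);
 *   // Attach "appearance" to whatever geometry should display the video.
 */
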
public class J3DVideoRenderer implements ControllerListener,
        VideoRenderer {

    /*************************************************************************
     * Variables and Constants
     *************************************************************************/

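    // p is the JMF Processor that decodes the media and drives this renderer;
    // waitSync and stateTransOK are shared with controllerUpdate() and
    // waitForState() to synchronize on the processor's state transitions.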
    Processor p;
    int[] waitSync = new int[0];
    boolean stateTransOK = true;

    public static final boolean DEBUG = false;

    // The descriptive name of this renderer
    private static final String name = "J3DVideoRenderer";

    protected RGBFormat inputFormat;
    protected RGBFormat supportedRGB;
    protected Format[] supportedFormats;

    protected Buffer lastBuffer = null;

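    // Side length, in texels, of the square power-of-two texture that the
    // incoming video is scaled into.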
    public static final int SS = 128;
    protected int inWidth = 320;
    protected int inHeight = 240;

    protected Component component = null;
    protected Rectangle reqBounds = null;
    protected Rectangle bounds = new Rectangle();
    protected boolean started = false;
    protected Object lastData = null;

    Appearance app = null;
    Texture2D tex = null;

    int count = 0;
    boolean firstFrame;
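    // Platform-dependent BufferedImage, ImageComponent and Texture formats,
    // chosen in the constructor; byRef selects by-reference texture updates.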
    int btype = 0;
    int itype = 0;
    int ttype = 0;
    boolean byRef = true;

    /*************************************************************************
     * Constructor
     *************************************************************************/

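    /**
     * Creates the renderer: opens a JMF Processor for the given media URL,
     * installs this object as the renderer of the first video track, and
     * streams the decoded frames into the supplied Appearance as a Texture2D.
     * The Canvas3D is reported as this renderer's AWT component.
     */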
    public J3DVideoRenderer(String url, Canvas3D canvas3D,
            Appearance appearance) {

        MediaLocator ml;

        try {
            ml = new MediaLocator(url);
            if (ml == null) {
                System.out.println("can not access url = " + url);
                System.exit(0);
            }

            try {
                p = Manager.createProcessor(ml);
            } catch (Exception ex) {
                if (DEBUG) {
                    System.out.println("failed to create a processor for movie "
                            + ml);
                }
                System.exit(0);
            }

            p.addControllerListener(this);

            p.configure();

            if (!waitForState(Processor.Configured)) {
                if (DEBUG) {
                    System.out.println("failed to configure the processor");
                }
                System.exit(0);
            }

            // use processor as a player
            p.setContentDescriptor(null);

            // obtain the track control
            TrackControl[] tc = p.getTrackControls();

            if (tc == null) {
                if (DEBUG) {
                    System.out.println("failed to get the track control from processor");
                }
                System.exit(0);
            }

            TrackControl vtc = null;

            for (int i = 0; i < tc.length; i++) {
                if (tc[i].getFormat() instanceof VideoFormat) {
                    vtc = tc[i];
                    break;
                }
            }

            if (vtc == null) {
                if (DEBUG) {
                    System.out.println("can't find video track");
                }
                System.exit(0);
            }

            // Prepare supported input formats and preferred format

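            // 24-bit RGB in a byte array with blue, green, red byte order
            // (masks 3, 2, 1) and pixel stride 3; size, frame rate and line
            // stride are left unspecified so any resolution is accepted.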
            supportedRGB = new RGBFormat(null, Format.NOT_SPECIFIED,
                    Format.byteArray, Format.NOT_SPECIFIED, 24, 3, 2,
                    1, 3, Format.NOT_SPECIFIED, Format.TRUE,
                    Format.NOT_SPECIFIED);

            supportedFormats = new VideoFormat[] { supportedRGB };
            firstFrame = true;

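            // Pick image formats per platform: OS names starting with "W" are
            // taken to be Windows (3-byte BGR, by-reference textures), "S" to
            // be Solaris (4-byte ABGR, by-reference); anything else falls back
            // to 3-byte BGR with copied (non-by-reference) textures.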
            String os = System.getProperty("os.name");
            if (os.startsWith("W") || os.startsWith("w")) {
                btype = BufferedImage.TYPE_3BYTE_BGR;
                itype = ImageComponent.FORMAT_RGB;
                ttype = Texture.RGB;
                byRef = true;
            } else if (os.startsWith("S") || os.startsWith("s")) {
                btype = BufferedImage.TYPE_4BYTE_ABGR;
                itype = ImageComponent.FORMAT_RGBA;
                ttype = Texture.RGBA;
                byRef = true;

            } else {
                btype = BufferedImage.TYPE_3BYTE_BGR;
                itype = ImageComponent.FORMAT_RGB;
                ttype = Texture.RGB;
                byRef = false;
            }

            component = canvas3D;
            app = appearance;

            try {
                vtc.setRenderer(this);
            } catch (Exception ex) {
                ex.printStackTrace();
                if (DEBUG) {
                    System.out.println("the processor does not support this renderer plugin");
                }
                System.exit(0);
            }

            // prefetch
            p.prefetch();
            if (!waitForState(Processor.Prefetched)) {
                if (DEBUG) {
                    System.out.println("failed to prefetch the processor");
                }
                System.exit(0);
            }
            if (DEBUG) {
                System.out.println("end of prefetch");
            }

            p.start();
            if (DEBUG) {
                System.out.println("processor start");
            }

        } catch (Exception ex) {
            ex.printStackTrace();
            System.exit(0);
        }

    }

    /****************************************************************
     * Controls implementation
     ****************************************************************/

    /**
     * Returns an array of supported controls
     **/
    public Object[] getControls() {
        // No controls
        return new Control[0];
    }

    /**
     * Return the control based on a control type for the PlugIn.
     */
    public Object getControl(String controlType) {
        try {
            Class cls = Class.forName(controlType);
            Object cs[] = getControls();
            for (int i = 0; i < cs.length; i++) {
                if (cls.isInstance(cs[i]))
                    return cs[i];
            }
            return null;
        } catch (Exception e) { // no such controlType or such control
            return null;
        }
    }

    /*************************************************************************
     * PlugIn implementation
     *************************************************************************/

    public String getName() {
        return name;
    }

    /**
     * Opens the plugin
     */
    public void open() throws ResourceUnavailableException {
        firstFrame = true;

        // Create the initial placeholder texture for the textured object
        tex = createTexture();

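        // Allow the texture image and the appearance's texture reference to be
        // replaced while the scene graph is live.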
        app.setCapability(Appearance.ALLOW_TEXTURE_WRITE);
        tex.setCapability(Texture.ALLOW_IMAGE_WRITE);

        app.setTexture(tex);
        TextureAttributes texAttr = new TextureAttributes();
        texAttr.setTextureMode(TextureAttributes.MODULATE);
        app.setTextureAttributes(texAttr);

        count = 0;
        // System.out.println("end of open");

    }

    /**
     * Resets the state of the plug-in. Typically at end of media or when media
     * is repositioned.
     */
    public void reset() {
        // Nothing to do
    }

    public synchronized void close() {

    }

    /*************************************************************************
     * Renderer implementation
     *************************************************************************/

    public void start() {
        started = true;
    }

    public void stop() {
        started = false;
    }

    /**
     * Lists the possible input formats supported by this plug-in.
     */
    public Format[] getSupportedInputFormats() {
        return supportedFormats;
    }

    /**
     * Set the data input format.
     */
    public Format setInputFormat(Format format) {
        if (format instanceof RGBFormat && format.matches(supportedRGB)) {

            inputFormat = (RGBFormat) format;
            Dimension size = inputFormat.getSize();
            inWidth = size.width;
            inHeight = size.height;
            // System.out.println("in setInputFormat = " + format);
            return format;
        } else
            return null;
    }

    /**
     * Processes the data and renders it to a component
     */

    public int process(Buffer buffer) {

        if (buffer.getLength() <= 0)
            return BUFFER_PROCESSED_OK;

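        // Frame-skip hook: count is reset to 0 below and is only incremented
        // inside this branch, so with the current values the branch never runs.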
        if (count < 0) {
            count++;
            try {
                Thread.sleep(50);
            } catch (Exception ex) {
                ex.printStackTrace();
            }

            return BUFFER_PROCESSED_OK;
        }

        count = 0;

        byte[] rawData = (byte[]) (buffer.getData());

        // System.out.println("inWidth = " + inWidth);
        // System.out.println("inHeight = " + inHeight);

        BufferedImage bimg = new BufferedImage(SS, SS, btype);
        byte[] byteData = ((DataBufferByte) bimg.getRaster()
                .getDataBuffer()).getData();

        int op, ip, x, y;
        byte alpha_1 = (byte) 0xff;
        op = 0;
        int lineStride = 3 * inWidth;

        // scale from inWidth x inHeight to SS x SS
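        // Nearest-neighbour sampling: ">> 7" divides by SS (128), mapping each
        // destination texel (j, i) back to a source pixel (x, y).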
        if (btype == BufferedImage.TYPE_3BYTE_BGR) {
            for (int i = 0; i < SS; i++)
                for (int j = 0; j < SS; j++) {
                    x = (inWidth * j) >> 7;
                    y = (inHeight * i) >> 7;

                    if (x >= inWidth || y >= inHeight) {
                        byteData[op++] = 0;
                        byteData[op++] = 0;
                        byteData[op++] = 0;
                    } else {
                        ip = y * lineStride + x * 3;
                        byteData[op++] = rawData[ip++];
                        byteData[op++] = rawData[ip++];
                        byteData[op++] = rawData[ip++];
                    }
                }
        } else { // in 4BYTE_ABGR format
            for (int i = 0; i < SS; i++)
                for (int j = 0; j < SS; j++) {
                    x = (inWidth * j) >> 7;
                    y = (inHeight * i) >> 7;

                    if (x >= inWidth || y >= inHeight) {
                        byteData[op++] = alpha_1;
                        byteData[op++] = 0;
                        byteData[op++] = 0;
                        byteData[op++] = 0;
                    } else {
                        ip = y * lineStride + x * 3;
                        byteData[op++] = alpha_1;
                        byteData[op++] = rawData[ip++];
                        byteData[op++] = rawData[ip++];
                        byteData[op++] = rawData[ip++];
                    }
                }
        }

        ImageComponent2D imgcmp = new ImageComponent2D(itype, bimg,
                byRef, true);
        tex.setImage(0, imgcmp);
        app.setTexture(tex);

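        // On the first frame, pause so the Java 3D scene graph has time to
        // finish initializing; afterwards a short sleep roughly paces the
        // texture updates.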
        if (firstFrame) {
            firstFrame = false;
            // u.addBranchGraph(scene);
            try {
                // give J3D more time to initialize
                Thread.sleep(5500);
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        } else {
            try {
                Thread.sleep(30);
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }

        // System.out.println("in doProcess");
        return BUFFER_PROCESSED_OK;
    }

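    // Builds the initial SS x SS texture, filled with a solid placeholder
    // colour that is shown until the first video frame arrives.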
    private Texture2D createTexture() {
        int j = 0;
        byte alpha_1 = (byte) 0xff;

        BufferedImage bimg = new BufferedImage(SS, SS, btype);
        byte[] byteData = ((DataBufferByte) bimg.getRaster()
                .getDataBuffer()).getData();
        if (btype == BufferedImage.TYPE_4BYTE_ABGR) {
            j = 0;
            for (int i = 0; i < SS * SS; i++) {
                byteData[j] = alpha_1;
                byteData[j + 1] = (byte) 192;
                byteData[j + 2] = (byte) 0;
                byteData[j + 3] = (byte) 192;
                j += 4;
            }
        } else {
            j = 0;
            for (int i = 0; i < SS * SS; i++) {
                byteData[j] = (byte) 192;
                byteData[j + 1] = (byte) 0;
                byteData[j + 2] = (byte) 192;
                j += 3;
            }

        }
        ImageComponent2D imgcmp = new ImageComponent2D(itype, bimg,
                byRef, true);
        Texture2D tex1 = new Texture2D(Texture2D.BASE_LEVEL, ttype, SS,
                SS);
        tex1.setImage(0, imgcmp);
        return tex1;
    }

    /*************************************************************************
     * Controller implementation
     *************************************************************************/

    public void controllerUpdate(ControllerEvent evt) {
        if (evt instanceof ConfigureCompleteEvent
                || evt instanceof RealizeCompleteEvent
                || evt instanceof PrefetchCompleteEvent) {
            synchronized (waitSync) {
                stateTransOK = true;
                waitSync.notifyAll();
            }
        } else if (evt instanceof ResourceUnavailableEvent) {
            synchronized (waitSync) {
                stateTransOK = false;
                waitSync.notifyAll();
            }
        } else if (evt instanceof EndOfMediaEvent) {
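            // Loop playback: rewind to the beginning and restart the processor.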
            p.setMediaTime(new Time(0));
            p.start();
            // p.close();
            // System.exit(0);
        }
    }

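    /**
     * Blocks until the processor reaches the given state; returns false if a
     * ResourceUnavailableEvent was received while waiting.
     */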
    public boolean waitForState(int state) {
        synchronized (waitSync) {
            try {
                while (p.getState() != state && stateTransOK) {
                    waitSync.wait();
                }
            } catch (Exception ex) {
            }

            return stateTransOK;
        }
    }

    /****************************************************************
     * VideoRenderer implementation
     ****************************************************************/

    /**
     * Returns an AWT component that it will render to. Returns null
     * if it is not rendering to an AWT component.
     */
    public java.awt.Component getComponent() {
        return component;
    }

    /**
     * Requests the renderer to draw into a specified AWT component.
     * Returns false if the renderer cannot draw into the specified
     * component.
     */
    public boolean setComponent(java.awt.Component comp) {
        if (comp instanceof javax.media.j3d.Canvas3D) {
            component = comp;
            return true;
        } else
            return false;
    }

    /**
     * Sets the region in the component where the video is to be
     * rendered to. Video is to be scaled if necessary. If <code>rect</code>
     * is null, then the video occupies the entire component.
     */
    public void setBounds(java.awt.Rectangle rect) {
        reqBounds = rect;
    }

    /**
     * Returns the region in the component where the video will be
     * rendered to. Returns null if the entire component is being used.
     */
    public java.awt.Rectangle getBounds() {
        return reqBounds;
    }

    /*************************************************************************
     * Local methods
     *************************************************************************/

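    // Package-private accessors for the dimensions of the incoming video frames.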
    int getInWidth() {
        return inWidth;
    }

    int getInHeight() {
        return inHeight;
    }

}
|