/*
 * Copyright 1999-2004 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Sun designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Sun in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 */

package javax.sound.sampled;

/**
 * <code>DataLine</code> adds media-related functionality to its
 * superinterface, <code>{@link Line}</code>. This functionality includes
 * transport-control methods that start, stop, drain, and flush
 * the audio data that passes through the line. A data line can also
 * report the current position, volume, and audio format of the media.
 * Data lines are used for output of audio by means of the
 * subinterfaces <code>{@link SourceDataLine}</code> or
 * <code>{@link Clip}</code>, which allow an application program to write
 * data. Similarly, audio input is handled by the subinterface
 * <code>{@link TargetDataLine}</code>, which allows data to be read.
 * <p>
 * A data line has an internal buffer in which
 * the incoming or outgoing audio data is queued. The
 * <code>{@link #drain()}</code> method blocks until this internal buffer
 * becomes empty, usually because all queued data has been processed. The
 * <code>{@link #flush()}</code> method discards any available queued data
 * from the internal buffer.
 * <p>
 * A data line produces <code>{@link LineEvent.Type#START START}</code> and
 * <code>{@link LineEvent.Type#STOP STOP}</code> events whenever
 * it begins or ceases active presentation or capture of data. These events
 * can be generated in response to specific requests, or as a result of
 * less direct state changes. For example, if <code>{@link #start()}</code> is
 * called on an inactive data line, and data is available for capture or
 * playback, a <code>START</code> event will be generated shortly, when data
 * playback or capture actually begins. Or, if the flow of data to an active
 * data line is constricted so that a gap occurs in the presentation of data,
 * a <code>STOP</code> event is generated.
 * <p>
 * Mixers often support synchronized control of multiple data lines.
 * Synchronization can be established through the <code>Mixer</code>
 * interface's <code>{@link Mixer#synchronize synchronize}</code> method.
 * See the description of the <code>{@link Mixer Mixer}</code> interface
 * for a more complete description.
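 * <p>
 * As an illustrative sketch (not part of this specification), a simple
 * playback loop using a <code>SourceDataLine</code> might look like the
 * following. The file name and buffer size are placeholders, and exception
 * handling is omitted for brevity:
 * <pre>
 *   AudioInputStream stream =
 *       AudioSystem.getAudioInputStream(new java.io.File("sound.wav"));
 *   AudioFormat format = stream.getFormat();
 *   SourceDataLine line = (SourceDataLine) AudioSystem.getLine(
 *       new DataLine.Info(SourceDataLine.class, format));
 *   line.open(format);
 *   line.start();                          // enable data I/O
 *   byte[] buffer = new byte[4096];
 *   int count;
 *   while ((count = stream.read(buffer, 0, buffer.length)) != -1) {
 *       line.write(buffer, 0, count);      // queue data for playback
 *   }
 *   line.drain();                          // wait for queued data to play out
 *   line.close();
 * </pre>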
 *
 * @author Kara Kytle
 * @version 1.40, 07/05/05
 * @see LineEvent
 * @since 1.3
 */
public interface DataLine extends Line {

    /**
     * Drains queued data from the line by continuing data I/O until the
     * data line's internal buffer has been emptied.
     * This method blocks until the draining is complete. Because this is a
     * blocking method, it should be used with care. If <code>drain()</code>
     * is invoked on a stopped line that has data in its queue, the method will
     * block until the line is running and the data queue becomes empty. If
     * <code>drain()</code> is invoked by one thread, and another continues to
     * fill the data queue, the operation will not complete.
     * This method always returns when the data line is closed.
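     * <p>
     * As a sketch (illustrative only; <code>line</code>, <code>lastBuffer</code>,
     * and <code>lastLength</code> are placeholders for an open source data line
     * and its final chunk of data), draining is typically done after the final
     * write, so the tail of the audio is heard before the line is closed:
     * <pre>
     *   line.write(lastBuffer, 0, lastLength);  // queue the final data
     *   line.drain();                           // block until it has played
     *   line.stop();
     *   line.close();
     * </pre>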
     *
     * @see #flush()
     */
    public void drain();

    /**
     * Flushes queued data from the line. The flushed data is discarded.
     * In some cases, not all queued data can be discarded. For example, a
     * mixer can flush data from the buffer for a specific input line, but any
     * unplayed data already in the output buffer (the result of the mix) will
     * still be played. You can invoke this method after pausing a line (the
     * normal case) if you want to skip the "stale" data when you restart
     * playback or capture. (It is legal to flush a line that is not stopped,
     * but doing so on an active line is likely to cause a discontinuity in the
     * data, resulting in a perceptible click.)
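     * <p>
     * For example, skipping ahead during playback might be sketched as
     * follows (illustrative only; <code>line</code> is assumed to be an open
     * data line):
     * <pre>
     *   line.stop();    // pause presentation
     *   line.flush();   // discard the now-stale queued data
     *   // ... reposition the source of the audio data as desired ...
     *   line.start();   // resume; newly written data plays immediately
     * </pre>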
     *
     * @see #stop()
     * @see #drain()
     */
    public void flush();

    /**
     * Allows a line to engage in data I/O. If invoked on a line
     * that is already running, this method does nothing. Unless the data in
     * the buffer has been flushed, the line resumes I/O starting
     * with the first frame that was unprocessed at the time the line was
     * stopped. When audio capture or playback starts, a
     * <code>{@link LineEvent.Type#START START}</code> event is generated.
     *
     * @see #stop()
     * @see #isRunning()
     * @see LineEvent
     */
    public void start();

    /**
     * Stops the line. A stopped line should cease I/O activity.
     * If the line is open and running, however, it should retain the resources
     * required to resume activity. A stopped line should retain any audio data
     * in its buffer instead of discarding it, so that upon resumption the I/O
     * can continue where it left off, if possible. (This doesn't guarantee
     * that there will never be discontinuities beyond the current buffer, of
     * course; if the stopped condition continues for too long, input or output
     * samples might be dropped.) If desired, the retained data can be
     * discarded by invoking the <code>flush</code> method.
     * When audio capture or playback stops, a
     * <code>{@link LineEvent.Type#STOP STOP}</code> event is generated.
     *
     * @see #start()
     * @see #isRunning()
     * @see #flush()
     * @see LineEvent
     */
    public void stop();

    /**
     * Indicates whether the line is running. The default is <code>false</code>.
     * An open line begins running when the first data is presented in response
     * to an invocation of the <code>start</code> method, and continues
     * until presentation ceases in response to a call to <code>stop</code> or
     * because playback completes.
     *
     * @return <code>true</code> if the line is running, otherwise <code>false</code>
     * @see #start()
     * @see #stop()
     */
    public boolean isRunning();

    /**
     * Indicates whether the line is engaging in active I/O (such as playback
     * or capture). When an inactive line becomes active, it sends a
     * <code>{@link LineEvent.Type#START START}</code> event to its listeners.
     * Similarly, when an active line becomes inactive, it sends a
     * <code>{@link LineEvent.Type#STOP STOP}</code> event.
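     * <p>
     * As a sketch (not normative), an application can track these events with
     * a listener rather than polling this method:
     * <pre>
     *   line.addLineListener(new LineListener() {
     *       public void update(LineEvent event) {
     *           if (event.getType() == LineEvent.Type.STOP) {
     *               System.out.println("presentation stopped");
     *           }
     *       }
     *   });
     * </pre>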
     * @return <code>true</code> if the line is actively capturing or rendering
     * sound, otherwise <code>false</code>
     * @see #isOpen
     * @see #addLineListener
     * @see #removeLineListener
     * @see LineEvent
     * @see LineListener
     */
    public boolean isActive();

    /**
     * Obtains the current format (encoding, sample rate, number of channels,
     * etc.) of the data line's audio data.
     *
     * <p>If the line is not open and has never been opened, it returns
     * the default format. The default format is an implementation
     * specific audio format, or, if the <code>DataLine.Info</code>
     * object, which was used to retrieve this <code>DataLine</code>,
     * specifies at least one fully qualified audio format, the
     * last one will be used as the default format. Opening the
     * line with a specific audio format (e.g.
     * {@link SourceDataLine#open(AudioFormat)}) will override the
     * default format.
     *
     * @return current audio data format
     * @see AudioFormat
     */
    public AudioFormat getFormat();

    /**
     * Obtains the maximum number of bytes of data that will fit in the data
     * line's internal buffer. For a source data line, this is the size of the
     * buffer to which data can be written. For a target data line, it is the
     * size of the buffer from which data can be read. Note that
     * the units used are bytes, but will always correspond to an integral
     * number of sample frames of audio data.
     *
     * @return the size of the buffer in bytes
     */
    public int getBufferSize();

    /**
     * Obtains the number of bytes of data currently available to the
     * application for processing in the data line's internal buffer. For a
     * source data line, this is the amount of data that can be written to the
     * buffer without blocking. For a target data line, this is the amount of
     * data available to be read by the application. For a clip, this value is
     * always 0 because the audio data is loaded into the buffer when the clip
     * is opened, and persists without modification until the clip is closed.
     * <p>
     * Note that the units used are bytes, but will always
     * correspond to an integral number of sample frames of audio data.
     * <p>
     * An application is guaranteed that a read or
     * write operation of up to the number of bytes returned from
     * <code>available()</code> will not block; however, there is no guarantee
     * that attempts to read or write more data will block.
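     * <p>
     * For example, a non-blocking write to a <code>SourceDataLine</code>
     * might be sketched as follows (illustrative only; <code>data</code> and
     * <code>offset</code> are placeholders for the application's buffer and
     * its current write position):
     * <pre>
     *   int frameSize = line.getFormat().getFrameSize();
     *   int n = Math.min(line.available(), data.length - offset);
     *   n -= n % frameSize;              // write whole sample frames only
     *   if (n > 0) {
     *       offset += line.write(data, offset, n);
     *   }
     * </pre>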
     *
     * @return the amount of data available, in bytes
     */
    public int available();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     * This return value will wrap around after 2^31 frames. It is recommended
     * to use <code>getLongFramePosition</code> instead.
     *
     * @return the number of frames already processed since the line was opened
     * @see #getLongFramePosition()
     */
    public int getFramePosition();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     *
     * @return the number of frames already processed since the line was opened
     * @since 1.5
     */
    public long getLongFramePosition();

    /**
     * Obtains the current position in the audio data, in microseconds.
     * The microsecond position measures the time corresponding to the number
     * of sample frames captured by, or rendered from, the line since it was
     * opened. The level of precision is not guaranteed. For example, an
     * implementation might calculate the microsecond position from the current
     * frame position and the audio sample frame rate. The precision in
     * microseconds would then be limited to the number of microseconds per
     * sample frame.
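     * <p>
     * As a rough sketch of such a calculation (implementations are not
     * required to compute the position this way):
     * <pre>
     *   long micros = (long) (line.getLongFramePosition()
     *                         * 1000000.0 / line.getFormat().getFrameRate());
     * </pre>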
     *
     * @return the number of microseconds of data processed since the line was opened
     */
    public long getMicrosecondPosition();

    /**
     * Obtains the current volume level for the line. This level is a measure
     * of the signal's current amplitude, and should not be confused with the
     * current setting of a gain control. The range is from 0.0 (silence) to
     * 1.0 (maximum possible amplitude for the sound waveform). The units
     * measure linear amplitude, not decibels.
     *
     * @return the current amplitude of the signal in this line, or
     * <code>{@link AudioSystem#NOT_SPECIFIED}</code>
     */
    public float getLevel();

    /**
     * Besides the class information inherited from its superclass,
     * <code>DataLine.Info</code> provides additional information specific to
     * data lines. This information includes:
     * <ul>
     * <li> the audio formats supported by the data line
     * <li> the minimum and maximum sizes of its internal buffer
     * </ul>
     * Because a <code>Line.Info</code> knows the class of the line it
     * describes, a <code>DataLine.Info</code> object can describe
     * <code>DataLine</code> subinterfaces such as
     * <code>{@link SourceDataLine}</code>, <code>{@link TargetDataLine}</code>,
     * and <code>{@link Clip}</code>.
     * You can query a mixer for lines of any of these types, passing an
     * appropriate instance of <code>DataLine.Info</code> as the argument to a
     * method such as <code>{@link Mixer#getLine Mixer.getLine(Line.Info)}</code>.
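     * <p>
     * For example, the following sketch (illustrative only; <code>format</code>
     * is a placeholder) requests a <code>SourceDataLine</code> supporting a
     * given format from the default system mixer:
     * <pre>
     *   DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     *   if (AudioSystem.isLineSupported(info)) {
     *       SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
     *   }
     * </pre>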
     *
     * @see Line.Info
     * @author Kara Kytle
     * @version 1.40, 07/05/05
     * @since 1.3
     */
    public static class Info extends Line.Info {

        private AudioFormat[] formats;
        private int minBufferSize;
        private int maxBufferSize;

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a set of supported audio formats and a range for the
         * buffer size. This constructor is typically used by mixer
         * implementations when returning information about a supported line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param formats set of formats supported
         * @param minBufferSize minimum buffer size supported by the data line, in bytes
         * @param maxBufferSize maximum buffer size supported by the data line, in bytes
         */
        public Info(Class<?> lineClass, AudioFormat[] formats,
                    int minBufferSize, int maxBufferSize) {

            super(lineClass);

            if (formats == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = formats;
            }

            this.minBufferSize = minBufferSize;
            this.maxBufferSize = maxBufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format and a desired buffer size.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         * @param bufferSize desired buffer size in bytes
         */
        public Info(Class<?> lineClass, AudioFormat format, int bufferSize) {

            super(lineClass);

            if (format == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = new AudioFormat[] { format };
            }

            this.minBufferSize = bufferSize;
            this.maxBufferSize = bufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         */
        public Info(Class<?> lineClass, AudioFormat format) {
            this(lineClass, format, AudioSystem.NOT_SPECIFIED);
        }

        /**
         * Obtains a set of audio formats supported by the data line.
         * Note that <code>isFormatSupported(AudioFormat)</code> might return
         * <code>true</code> for certain additional formats that are missing
         * from the set returned by <code>getFormats()</code>. The reverse is
         * not the case: <code>isFormatSupported(AudioFormat)</code> is
         * guaranteed to return <code>true</code> for all formats returned by
         * <code>getFormats()</code>.
         *
         * <p>Some fields in the <code>AudioFormat</code> instances can be set to
         * {@link javax.sound.sampled.AudioSystem#NOT_SPECIFIED NOT_SPECIFIED}
         * if that field does not apply to the format,
         * or if the format supports a wide range of values for that field.
         * For example, a multi-channel device supporting up to
         * 64 channels could set the channel field in the
         * <code>AudioFormat</code> instances returned by this
         * method to <code>NOT_SPECIFIED</code>.
         *
         * @return a set of supported audio formats
         * @see #isFormatSupported(AudioFormat)
         */
        public AudioFormat[] getFormats() {
            AudioFormat[] returnedArray = new AudioFormat[formats.length];
            System.arraycopy(formats, 0, returnedArray, 0, formats.length);
            return returnedArray;
        }

        /**
         * Indicates whether this data line supports a particular audio format.
         * The default implementation of this method simply returns <code>true</code> if
         * the specified format matches any of the supported formats.
         *
         * @param format the audio format for which support is queried
         * @return <code>true</code> if the format is supported, otherwise <code>false</code>
         * @see #getFormats
         * @see AudioFormat#matches
         */
        public boolean isFormatSupported(AudioFormat format) {
            for (int i = 0; i < formats.length; i++) {
                if (format.matches(formats[i])) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Obtains the minimum buffer size supported by the data line.
         *
         * @return minimum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMinBufferSize() {
            return minBufferSize;
        }

        /**
         * Obtains the maximum buffer size supported by the data line.
         *
         * @return maximum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMaxBufferSize() {
            return maxBufferSize;
        }

        /**
         * Determines whether the specified info object matches this one.
         * To match, the superclass match requirements must be met. In
         * addition, this object's minimum buffer size must be at least as
         * large as that of the object specified, its maximum buffer size must
         * be at most as large as that of the object specified, and all of its
         * formats must match formats supported by the object specified.
         *
         * @return <code>true</code> if this object matches the one specified,
         * otherwise <code>false</code>
         */
        public boolean matches(Line.Info info) {

            if (!(super.matches(info))) {
                return false;
            }

            Info dataLineInfo = (Info) info;

            // treat anything < 0 as NOT_SPECIFIED
            // (demo code in the old Java Sound Demo used a wrong buffer
            // calculation that would lead to arbitrary negative values)
            if ((getMaxBufferSize() >= 0) && (dataLineInfo.getMaxBufferSize() >= 0)) {
                if (getMaxBufferSize() > dataLineInfo.getMaxBufferSize()) {
                    return false;
                }
            }

            if ((getMinBufferSize() >= 0) && (dataLineInfo.getMinBufferSize() >= 0)) {
                if (getMinBufferSize() < dataLineInfo.getMinBufferSize()) {
                    return false;
                }
            }

            AudioFormat[] localFormats = getFormats();

            if (localFormats != null) {
                for (int i = 0; i < localFormats.length; i++) {
                    if (localFormats[i] != null) {
                        if (!dataLineInfo.isFormatSupported(localFormats[i])) {
                            return false;
                        }
                    }
                }
            }

            return true;
        }

        /**
         * Obtains a textual description of the data line info.
         *
         * @return a string description
         */
        public String toString() {

            StringBuffer buf = new StringBuffer();

            if ((formats.length == 1) && (formats[0] != null)) {
                buf.append(" supporting format " + formats[0]);
            } else if (getFormats().length > 1) {
                buf.append(" supporting " + getFormats().length + " audio formats");
            }

            if ((minBufferSize != AudioSystem.NOT_SPECIFIED)
                    && (maxBufferSize != AudioSystem.NOT_SPECIFIED)) {
                buf.append(", and buffers of " + minBufferSize + " to "
                           + maxBufferSize + " bytes");
            } else if ((minBufferSize != AudioSystem.NOT_SPECIFIED)
                    && (minBufferSize > 0)) {
                buf.append(", and buffers of at least " + minBufferSize + " bytes");
            } else if (maxBufferSize != AudioSystem.NOT_SPECIFIED) {
                buf.append(", and buffers of up to " + maxBufferSize + " bytes");
            }

            return super.toString() + buf;
        }

    } // class Info

} // interface DataLine
|