/*
 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.sound.sampled;

import java.util.Arrays;

/**
 * {@code DataLine} adds media-related functionality to its superinterface,
 * {@code Line}. This functionality includes transport-control methods that
 * start, stop, drain, and flush the audio data that passes through the line. A
 * data line can also report the current position, volume, and audio format of
 * the media. Data lines are used for output of audio by means of the
 * subinterfaces {@link SourceDataLine} or {@link Clip}, which allow an
 * application program to write data. Similarly, audio input is handled by the
 * subinterface {@link TargetDataLine}, which allows data to be read.
 * <p>
 * A data line has an internal buffer in which the incoming or outgoing audio
 * data is queued. The {@link #drain()} method blocks until this internal buffer
 * becomes empty, usually because all queued data has been processed. The
 * {@link #flush()} method discards any available queued data from the internal
 * buffer.
 * <p>
 * A data line produces {@link LineEvent.Type#START START} and
 * {@link LineEvent.Type#STOP STOP} events whenever it begins or ceases active
 * presentation or capture of data. These events can be generated in response to
 * specific requests, or as a result of less direct state changes. For example,
 * if {@link #start()} is called on an inactive data line, and data is available
 * for capture or playback, a {@code START} event will be generated shortly,
 * when data playback or capture actually begins. Or, if the flow of data to an
 * active data line is constricted so that a gap occurs in the presentation of
 * data, a {@code STOP} event is generated.
 * <p>
 * Mixers often support synchronized control of multiple data lines.
 * Synchronization can be established through the {@code Mixer} interface's
 * {@link Mixer#synchronize synchronize} method. See the description of the
 * {@link Mixer Mixer} interface for a more complete description.
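 * <p>
 * For example, the following sketch plays back raw PCM data through a
 * {@link SourceDataLine}. The format values (44.1 kHz, 16-bit, stereo) and the
 * {@code audioBytes} array are illustrative only; real code would obtain them
 * from an {@code AudioInputStream} or another audio source.
 * <pre>{@code
 * AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
 * DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
 * try (SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info)) {
 *     line.open(format);
 *     line.start();                               // begin presenting data
 *     line.write(audioBytes, 0, audioBytes.length);
 *     line.drain();                               // block until the buffer is empty
 *     line.stop();
 * } catch (LineUnavailableException e) {
 *     // no line of this kind is currently available
 * }
 * }</pre>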
 *
 * @author Kara Kytle
 * @see LineEvent
 * @since 1.3
 */
public interface DataLine extends Line {

    /**
     * Drains queued data from the line by continuing data I/O until the data
     * line's internal buffer has been emptied. This method blocks until the
     * draining is complete. Because this is a blocking method, it should be
     * used with care. If {@code drain()} is invoked on a stopped line that has
     * data in its queue, the method will block until the line is running and
     * the data queue becomes empty. If {@code drain()} is invoked by one
     * thread, and another continues to fill the data queue, the operation will
     * not complete. This method always returns when the data line is closed.
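     * <p>
     * A typical use, sketched below, is to drain a {@code SourceDataLine}
     * after the last block of audio has been written (here {@code lastChunk}
     * is an illustrative byte array), so that no queued data is lost when the
     * line is subsequently stopped and closed:
     * <pre>{@code
     * line.write(lastChunk, 0, lastChunk.length);
     * line.drain();   // wait until all queued data has been played
     * line.stop();
     * line.close();
     * }</pre>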
     *
     * @see #flush()
     */
    void drain();

    /**
     * Flushes queued data from the line. The flushed data is discarded. In some
     * cases, not all queued data can be discarded. For example, a mixer can
     * flush data from the buffer for a specific input line, but any unplayed
     * data already in the output buffer (the result of the mix) will still be
     * played. You can invoke this method after pausing a line (the normal case)
     * if you want to skip the "stale" data when you restart playback or
     * capture. (It is legal to flush a line that is not stopped, but doing so
     * on an active line is likely to cause a discontinuity in the data,
     * resulting in a perceptible click.)
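     * <p>
     * For example, the following sketch pauses playback and discards whatever
     * is still queued, so that playback can later resume at a new position
     * without first emitting the stale data:
     * <pre>{@code
     * line.stop();    // pause presentation
     * line.flush();   // discard data still queued in the internal buffer
     * // ... later, queue new data and call line.start()
     * }</pre>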
     *
     * @see #stop()
     * @see #drain()
     */
    void flush();

    /**
     * Allows a line to engage in data I/O. If invoked on a line that is already
     * running, this method does nothing. Unless the data in the buffer has been
     * flushed, the line resumes I/O starting with the first frame that was
     * unprocessed at the time the line was stopped. When audio capture or
     * playback starts, a {@link LineEvent.Type#START START} event is generated.
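     * <p>
     * The sketch below registers a {@link LineListener} before starting the
     * line, so the application is notified when presentation actually begins
     * and ends:
     * <pre>{@code
     * line.addLineListener(event -> {
     *     if (event.getType() == LineEvent.Type.START) {
     *         System.out.println("playback started");
     *     } else if (event.getType() == LineEvent.Type.STOP) {
     *         System.out.println("playback stopped");
     *     }
     * });
     * line.start();
     * }</pre>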
     *
     * @see #stop()
     * @see #isRunning()
     * @see LineEvent
     */
    void start();

    /**
     * Stops the line. A stopped line should cease I/O activity. If the line is
     * open and running, however, it should retain the resources required to
     * resume activity. A stopped line should retain any audio data in its
     * buffer instead of discarding it, so that upon resumption the I/O can
     * continue where it left off, if possible. (This doesn't guarantee that
     * there will never be discontinuities beyond the current buffer, of course;
     * if the stopped condition continues for too long, input or output samples
     * might be dropped.) If desired, the retained data can be discarded by
     * invoking the {@code flush} method. When audio capture or playback stops,
     * a {@link LineEvent.Type#STOP STOP} event is generated.
     *
     * @see #start()
     * @see #isRunning()
     * @see #flush()
     * @see LineEvent
     */
    void stop();

    /**
     * Indicates whether the line is running. The default is {@code false}. An
     * open line begins running when the first data is presented in response to
     * an invocation of the {@code start} method, and continues until
     * presentation ceases in response to a call to {@code stop} or because
     * playback completes.
     *
     * @return {@code true} if the line is running, otherwise {@code false}
     * @see #start()
     * @see #stop()
     */
    boolean isRunning();

    /**
     * Indicates whether the line is engaging in active I/O (such as playback or
     * capture). When an inactive line becomes active, it sends a
     * {@link LineEvent.Type#START START} event to its listeners. Similarly,
     * when an active line becomes inactive, it sends a
     * {@link LineEvent.Type#STOP STOP} event.
     *
     * @return {@code true} if the line is actively capturing or rendering
     *         sound, otherwise {@code false}
     * @see #isOpen
     * @see #addLineListener
     * @see #removeLineListener
     * @see LineEvent
     * @see LineListener
     */
    boolean isActive();

    /**
     * Obtains the current format (encoding, sample rate, number of channels,
     * etc.) of the data line's audio data.
     * <p>
     * If the line is not open and has never been opened, it returns the default
     * format. The default format is an implementation-specific audio format;
     * however, if the {@code DataLine.Info} object that was used to retrieve
     * this {@code DataLine} specifies at least one fully qualified audio
     * format, the last one is used as the default format. Opening the line with
     * a specific audio format (e.g. {@link SourceDataLine#open(AudioFormat)})
     * overrides the default format.
     *
     * @return current audio data format
     * @see AudioFormat
     */
    AudioFormat getFormat();

    /**
     * Obtains the maximum number of bytes of data that will fit in the data
     * line's internal buffer. For a source data line, this is the size of the
     * buffer to which data can be written. For a target data line, it is the
     * size of the buffer from which data can be read. Note that the units used
     * are bytes, but will always correspond to an integral number of sample
     * frames of audio data.
     *
     * @return the size of the buffer, in bytes
     */
    int getBufferSize();

    /**
     * Obtains the number of bytes of data currently available to the
     * application for processing in the data line's internal buffer. For a
     * source data line, this is the amount of data that can be written to the
     * buffer without blocking. For a target data line, this is the amount of
     * data available to be read by the application. For a clip, this value is
     * always 0 because the audio data is loaded into the buffer when the clip
     * is opened, and persists without modification until the clip is closed.
     * <p>
     * Note that the units used are bytes, but will always correspond to an
     * integral number of sample frames of audio data.
     * <p>
     * An application is guaranteed that a read or write operation of up to the
     * number of bytes returned from {@code available()} will not block;
     * however, there is no guarantee that attempts to read or write more data
     * will block.
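     * <p>
     * For example, the following sketch writes a buffer to a
     * {@code SourceDataLine} in chunks, never asking the line to accept more
     * data than it can take without blocking ({@code data} is an illustrative
     * byte array holding whole sample frames):
     * <pre>{@code
     * int offset = 0;
     * while (offset < data.length) {
     *     int chunk = Math.min(line.available(), data.length - offset);
     *     if (chunk > 0) {
     *         offset += line.write(data, offset, chunk);
     *     }
     * }
     * }</pre>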
     *
     * @return the amount of data available, in bytes
     */
    int available();

    /**
     * Obtains the current position in the audio data, in sample frames. The
     * frame position measures the number of sample frames captured by, or
     * rendered from, the line since it was opened. This return value will wrap
     * around after 2^31 frames. It is recommended to use
     * {@code getLongFramePosition} instead.
     *
     * @return the number of frames already processed since the line was opened
     * @see #getLongFramePosition()
     */
    int getFramePosition();

    /**
     * Obtains the current position in the audio data, in sample frames. The
     * frame position measures the number of sample frames captured by, or
     * rendered from, the line since it was opened.
     *
     * @return the number of frames already processed since the line was opened
     * @since 1.5
     */
    long getLongFramePosition();

    /**
     * Obtains the current position in the audio data, in microseconds. The
     * microsecond position measures the time corresponding to the number of
     * sample frames captured by, or rendered from, the line since it was
     * opened. The level of precision is not guaranteed. For example, an
     * implementation might calculate the microsecond position from the current
     * frame position and the audio sample frame rate. The precision in
     * microseconds would then be limited to the number of microseconds per
     * sample frame.
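     * <p>
     * As a sketch of that calculation, an application that only has the frame
     * position could derive the microsecond position from the line's frame
     * rate as follows:
     * <pre>{@code
     * float frameRate = line.getFormat().getFrameRate();   // frames per second
     * long micros = (long) (line.getLongFramePosition() / frameRate * 1_000_000);
     * }</pre>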
     *
     * @return the number of microseconds of data processed since the line was
     *         opened
     */
    long getMicrosecondPosition();

    /**
     * Obtains the current volume level for the line. This level is a measure of
     * the signal's current amplitude, and should not be confused with the
     * current setting of a gain control. The range is from 0.0 (silence) to 1.0
     * (maximum possible amplitude for the sound waveform). The units measure
     * linear amplitude, not decibels.
     *
     * @return the current amplitude of the signal in this line, or
     *         {@link AudioSystem#NOT_SPECIFIED}
     */
    float getLevel();

    /**
     * Besides the class information inherited from its superclass,
     * {@code DataLine.Info} provides additional information specific to data
     * lines. This information includes:
     * <ul>
     *   <li>the audio formats supported by the data line
     *   <li>the minimum and maximum sizes of its internal buffer
     * </ul>
     * Because a {@code Line.Info} knows the class of the line it describes, a
     * {@code DataLine.Info} object can describe {@code DataLine} subinterfaces
     * such as {@link SourceDataLine}, {@link TargetDataLine}, and {@link Clip}.
     * You can query a mixer for lines of any of these types, passing an
     * appropriate instance of {@code DataLine.Info} as the argument to a method
     * such as {@link Mixer#getLine(Line.Info)}.
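     * <p>
     * For example, the following sketch asks the {@code AudioSystem} for a
     * {@code TargetDataLine} that can capture CD-quality audio (the format
     * values are illustrative):
     * <pre>{@code
     * AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
     * DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
     * if (AudioSystem.isLineSupported(info)) {
     *     try {
     *         TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
     *         // open the line and start capturing ...
     *     } catch (LineUnavailableException e) {
     *         // a matching line exists but is currently unavailable
     *     }
     * }
     * }</pre>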
     *
     * @author Kara Kytle
     * @see Line.Info
     * @since 1.3
     */
    class Info extends Line.Info {

        /**
         * The set of supported formats.
         */
        private final AudioFormat[] formats;

        /**
         * Minimum buffer size supported by the data line, in bytes.
         */
        private final int minBufferSize;

        /**
         * Maximum buffer size supported by the data line, in bytes.
         */
        private final int maxBufferSize;

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a set of supported audio formats and a range for the
         * buffer size. This constructor is typically used by mixer
         * implementations when returning information about a supported line.
         *
         * @param  lineClass the class of the data line described by the info
         *         object
         * @param  formats set of formats supported
         * @param  minBufferSize minimum buffer size supported by the data line,
         *         in bytes
         * @param  maxBufferSize maximum buffer size supported by the data line,
         *         in bytes
         */
        public Info(Class<?> lineClass, AudioFormat[] formats,
                    int minBufferSize, int maxBufferSize) {
            super(lineClass);

            if (formats == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = Arrays.copyOf(formats, formats.length);
            }

            this.minBufferSize = minBufferSize;
            this.maxBufferSize = maxBufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format and a desired buffer size. This
         * constructor is typically used by an application to describe a desired
         * line.
         *
         * @param  lineClass the class of the data line described by the info
         *         object
         * @param  format desired format
         * @param  bufferSize desired buffer size, in bytes
         */
        public Info(Class<?> lineClass, AudioFormat format, int bufferSize) {
            super(lineClass);

            if (format == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = new AudioFormat[]{format};
            }

            this.minBufferSize = bufferSize;
            this.maxBufferSize = bufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format. This constructor is typically
         * used by an application to describe a desired line.
         *
         * @param  lineClass the class of the data line described by the info
         *         object
         * @param  format desired format
         */
        public Info(Class<?> lineClass, AudioFormat format) {
            this(lineClass, format, AudioSystem.NOT_SPECIFIED);
        }

        /**
         * Obtains a set of audio formats supported by the data line. Note that
         * {@code isFormatSupported(AudioFormat)} might return {@code true} for
         * certain additional formats that are missing from the set returned by
         * {@code getFormats()}. The reverse is not the case:
         * {@code isFormatSupported(AudioFormat)} is guaranteed to return
         * {@code true} for all formats returned by {@code getFormats()}.
         * <p>
         * Some fields in the {@code AudioFormat} instances can be set to
         * {@link AudioSystem#NOT_SPECIFIED NOT_SPECIFIED} if that field does
         * not apply to the format, or if the format supports a wide range of
         * values for that field. For example, a multi-channel device supporting
         * up to 64 channels could set the channel field in the
         * {@code AudioFormat} instances returned by this method to
         * {@code NOT_SPECIFIED}.
         *
         * @return a set of supported audio formats
         * @see #isFormatSupported(AudioFormat)
         */
        public AudioFormat[] getFormats() {
            return Arrays.copyOf(formats, formats.length);
        }

        /**
         * Indicates whether this data line supports a particular audio format.
         * The default implementation of this method simply returns {@code true}
         * if the specified format matches any of the supported formats.
         *
         * @param  format the audio format for which support is queried
         * @return {@code true} if the format is supported, otherwise
         *         {@code false}
         * @see #getFormats
         * @see AudioFormat#matches
         */
        public boolean isFormatSupported(AudioFormat format) {
            for (AudioFormat supported : formats) {
                if (format.matches(supported)) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Obtains the minimum buffer size supported by the data line.
         *
         * @return minimum buffer size in bytes, or
         *         {@code AudioSystem.NOT_SPECIFIED}
         */
        public int getMinBufferSize() {
            return minBufferSize;
        }

        /**
         * Obtains the maximum buffer size supported by the data line.
         *
         * @return maximum buffer size in bytes, or
         *         {@code AudioSystem.NOT_SPECIFIED}
         */
        public int getMaxBufferSize() {
            return maxBufferSize;
        }

        /**
         * Determines whether the specified info object matches this one. To
         * match, the superclass match requirements must be met. In addition,
         * this object's minimum buffer size must be at least as large as that
         * of the object specified, its maximum buffer size must be at most as
         * large as that of the object specified, and all of its formats must
         * match formats supported by the object specified.
         *
         * @param  info the info object which is being compared to this one
         * @return {@code true} if this object matches the one specified,
         *         otherwise {@code false}
         */
        @Override
        public boolean matches(Line.Info info) {
            if (!super.matches(info)) {
                return false;
            }

            Info dataLineInfo = (Info) info;

            // treat anything < 0 as NOT_SPECIFIED
            // demo code in the old Java Sound Demo used a wrong buffer calculation
            // that would lead to arbitrary negative values
            if ((getMaxBufferSize() >= 0) && (dataLineInfo.getMaxBufferSize() >= 0)) {
                if (getMaxBufferSize() > dataLineInfo.getMaxBufferSize()) {
                    return false;
                }
            }

            if ((getMinBufferSize() >= 0) && (dataLineInfo.getMinBufferSize() >= 0)) {
                if (getMinBufferSize() < dataLineInfo.getMinBufferSize()) {
                    return false;
                }
            }

            for (AudioFormat localFormat : getFormats()) {
                if (localFormat != null
                        && !dataLineInfo.isFormatSupported(localFormat)) {
                    return false;
                }
            }

            return true;
        }

        /**
         * Obtains a textual description of the data line info.
         *
         * @return a string description
         */
        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();

            if ((formats.length == 1) && (formats[0] != null)) {
                sb.append(" supporting format ").append(formats[0]);
            } else if (formats.length > 1) {
                sb.append(" supporting ").append(formats.length).append(" audio formats");
            }

            if ((minBufferSize != AudioSystem.NOT_SPECIFIED) && (maxBufferSize != AudioSystem.NOT_SPECIFIED)) {
                sb.append(", and buffers of ").append(minBufferSize)
                  .append(" to ").append(maxBufferSize).append(" bytes");
            } else if ((minBufferSize != AudioSystem.NOT_SPECIFIED) && (minBufferSize > 0)) {
                sb.append(", and buffers of at least ").append(minBufferSize).append(" bytes");
            } else if (maxBufferSize != AudioSystem.NOT_SPECIFIED) {
                sb.append(", and buffers of up to ").append(maxBufferSize).append(" bytes");
            }

            return super.toString() + sb;
        }
    }
}