1 /* 2 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. Oracle designates this 8 * particular file as subject to the "Classpath" exception as provided 9 * by Oracle in the LICENSE file that accompanied this code. 10 * 11 * This code is distributed in the hope that it will be useful, but WITHOUT 12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 * version 2 for more details (a copy is included in the LICENSE file that 15 * accompanied this code). 16 * 17 * You should have received a copy of the GNU General Public License version 18 * 2 along with this work; if not, write to the Free Software Foundation, 19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 20 * 21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 22 * or visit www.oracle.com if you need additional information or have any 23 * questions. 24 */ 25 26 package javax.sound.sampled; 27 28 import java.util.Collections; 29 import java.util.HashMap; 30 import java.util.Map; 31 32 /** 33 * {@code AudioFormat} is the class that specifies a particular arrangement of 34 * data in a sound stream. By examining the information stored in the audio 35 * format, you can discover how to interpret the bits in the binary sound data. 36 * <p> 37 * Every data line has an audio format associated with its data stream. The 38 * audio format of a source (playback) data line indicates what kind of data the 39 * data line expects to receive for output. For a target (capture) data line, 40 * the audio format specifies the kind of the data that can be read from the 41 * line. 
/* The complete class description follows. */
/**
 * {@code AudioFormat} is the class that specifies a particular arrangement of
 * data in a sound stream. By examining the information stored in the audio
 * format, you can discover how to interpret the bits in the binary sound data.
 * <p>
 * Every data line has an audio format associated with its data stream. The
 * audio format of a source (playback) data line indicates what kind of data the
 * data line expects to receive for output. For a target (capture) data line,
 * the audio format specifies the kind of the data that can be read from the
 * line.
 * <p>
 * Sound files also have audio formats, of course. The {@link AudioFileFormat}
 * class encapsulates an {@code AudioFormat} in addition to other, file-specific
 * information. Similarly, an {@link AudioInputStream} has an
 * {@code AudioFormat}.
 * <p>
 * The {@code AudioFormat} class accommodates a number of common sound-file
 * encoding techniques, including pulse-code modulation (PCM), mu-law encoding,
 * and a-law encoding. These encoding techniques are predefined, but service
 * providers can create new encoding types. The encoding that a specific format
 * uses is named by its {@code encoding} field.
 * <p>
 * In addition to the encoding, the audio format includes other properties that
 * further specify the exact arrangement of the data. These include the number
 * of channels, sample rate, sample size, byte order, frame rate, and frame
 * size. Sounds may have different numbers of audio channels: one for mono, two
 * for stereo. The sample rate measures how many "snapshots" (samples) of the
 * sound pressure are taken per second, per channel. (If the sound is stereo
 * rather than mono, two samples are actually measured at each instant of time:
 * one for the left channel, and another for the right channel; however, the
 * sample rate still measures the number per channel, so the rate is the same
 * regardless of the number of channels. This is the standard use of the term.)
 * The sample size indicates how many bits are used to store each snapshot; 8
 * and 16 are typical values. For 16-bit samples (or any other sample size
 * larger than a byte), byte order is important; the bytes in each sample are
 * arranged in either the "little-endian" or "big-endian" style. For encodings
 * like PCM, a frame consists of the set of samples for all channels at a given
 * point in time, and so the size of a frame (in bytes) is always equal to the
 * size of a sample (in bytes) times the number of channels. However, with some
 * other sorts of encodings a frame can contain a bundle of compressed data for
 * a whole series of samples, as well as additional, non-sample data. For such
 * encodings, the sample rate and sample size refer to the data after it is
 * decoded into PCM, and so they are completely different from the frame rate
 * and frame size.
 * <p>
 * An {@code AudioFormat} object can include a set of properties. A property is
 * a pair of key and value: the key is of type {@code String}, the associated
 * property value is an arbitrary object. Properties specify additional format
 * specifications, like the bit rate for compressed formats. Properties are
 * mainly used as a means to transport additional information of the audio
 * format to and from the service providers. Therefore, properties are ignored
 * in the {@link #matches(AudioFormat)} method. However, methods which rely on
 * the installed service providers, like
 * {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
 * isConversionSupported} may consider properties, depending on the respective
 * service provider implementation.
 * <p>
 * The following table lists some common properties which service providers
 * should use, if applicable:
 *
 * <table border=0>
 * <caption>Audio Format Properties</caption>
 * <tr>
 *   <th>Property key</th>
 *   <th>Value type</th>
 *   <th>Description</th>
 * </tr>
 * <tr>
 *   <td>"bitrate"</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>average bit rate in bits per second</td>
 * </tr>
 * <tr>
 *   <td>"vbr"</td>
 *   <td>{@link java.lang.Boolean Boolean}</td>
 *   <td>{@code true}, if the file is encoded in variable bit rate (VBR)</td>
 * </tr>
 * <tr>
 *   <td>"quality"</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>encoding/conversion quality, 1..100</td>
 * </tr>
 * </table>
 * <p>
 * Vendors of service providers (plugins) are encouraged to seek information
 * about other already established properties in third party plugins, and follow
 * the same conventions.
 *
 * @author Kara Kytle
 * @author Florian Bomers
 * @see DataLine#getFormat
 * @see AudioInputStream#getFormat
 * @see AudioFileFormat
 * @see javax.sound.sampled.spi.FormatConversionProvider
 * @since 1.3
 */
public class AudioFormat {

    /**
     * The audio encoding technique used by this format.
     */
    protected Encoding encoding;

    /**
     * The number of samples played or recorded per second, for sounds that have
     * this format.
     */
    protected float sampleRate;

    /**
     * The number of bits in each sample of a sound that has this format.
     */
    protected int sampleSizeInBits;

    /**
     * The number of audio channels in this format (1 for mono, 2 for stereo).
     */
    protected int channels;

    /**
     * The number of bytes in each frame of a sound that has this format.
     */
    protected int frameSize;

    /**
     * The number of frames played or recorded per second, for sounds that have
     * this format.
     */
    protected float frameRate;

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * order.
     */
    protected boolean bigEndian;

    /**
     * The set of format properties, or {@code null} if none were supplied.
     */
    private HashMap<String, Object> properties;

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(Encoding encoding, float sampleRate,
                       int sampleSizeInBits, int channels, int frameSize,
                       float frameRate, boolean bigEndian) {
        this.encoding = encoding;
        this.sampleRate = sampleRate;
        this.sampleSizeInBits = sampleSizeInBits;
        this.channels = channels;
        this.frameSize = frameSize;
        this.frameRate = frameRate;
        this.bigEndian = bigEndian;
        this.properties = null;
    }

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     * @param  properties a {@code Map<String, Object>} object containing format
     *         properties
     * @since 1.5
     */
    public AudioFormat(Encoding encoding, float sampleRate,
                       int sampleSizeInBits, int channels, int frameSize,
                       float frameRate, boolean bigEndian,
                       Map<String, Object> properties) {
        this(encoding, sampleRate, sampleSizeInBits, channels,
             frameSize, frameRate, bigEndian);
        // Defensive copy: later modifications of the caller's map must not
        // affect this format.
        this.properties = new HashMap<>(properties);
    }

    /**
     * Constructs an {@code AudioFormat} with a linear PCM encoding and the
     * given parameters. The frame size is set to the number of bytes required
     * to contain one sample from each channel, and the frame rate is set to the
     * sample rate.
     *
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  signed indicates whether the data is signed or unsigned
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(float sampleRate, int sampleSizeInBits,
                       int channels, boolean signed, boolean bigEndian) {
        this(signed ? Encoding.PCM_SIGNED : Encoding.PCM_UNSIGNED,
             sampleRate,
             sampleSizeInBits,
             channels,
             (channels == AudioSystem.NOT_SPECIFIED
                     || sampleSizeInBits == AudioSystem.NOT_SPECIFIED)
                 ? AudioSystem.NOT_SPECIFIED
                 // Round the sample size up to whole bytes, then multiply by
                 // the channel count to get the PCM frame size.
                 : ((sampleSizeInBits + 7) / 8) * channels,
             sampleRate,
             bigEndian);
    }

    /**
     * Obtains the type of encoding for sounds in this format.
     *
     * @return the encoding type
     * @see Encoding#PCM_SIGNED
     * @see Encoding#PCM_UNSIGNED
     * @see Encoding#ULAW
     * @see Encoding#ALAW
     */
    public Encoding getEncoding() {
        return encoding;
    }

    /**
     * Obtains the sample rate. For compressed formats, the return value is the
     * sample rate of the uncompressed audio data. When this AudioFormat is used
     * for queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample rate is not defined for this audio format.
     *
     * @return the number of samples per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getSampleRate() {
        return sampleRate;
    }

    /**
     * Obtains the size of a sample. For compressed formats, the return value is
     * the sample size of the uncompressed audio data. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample size is not defined for this audio format.
     *
     * @return the number of bits in each sample, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameSize()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getSampleSizeInBits() {
        return sampleSizeInBits;
    }

    /**
     * Obtains the number of channels. When this AudioFormat is used for queries
     * (e.g. {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
     * AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a return
     * value of {@code AudioSystem.NOT_SPECIFIED} means that any (positive)
     * number of channels is acceptable.
     *
     * @return The number of channels (1 for mono, 2 for stereo, etc.), or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getChannels() {
        return channels;
    }

    /**
     * Obtains the frame size in bytes. When this AudioFormat is used for
     * queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame size is not defined for this audio format.
     *
     * @return the number of bytes per frame, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleSizeInBits()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getFrameSize() {
        return frameSize;
    }

    /**
     * Obtains the frame rate in frames per second. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame rate is not defined for this audio format.
     *
     * @return the number of frames per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getFrameRate() {
        return frameRate;
    }

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * byte order. If the sample size is not more than one byte, the return
     * value is irrelevant.
     *
     * @return {@code true} if the data is stored in big-endian byte order,
     *         {@code false} if little-endian
     */
    public boolean isBigEndian() {
        return bigEndian;
    }

    /**
     * Obtain an unmodifiable map of properties. The concept of properties is
     * further explained in the {@link AudioFormat class description}.
     *
     * @return a {@code Map<String, Object>} object containing all properties.
     *         If no properties are recognized, an empty map is returned.
     * @see #getProperty(String)
     * @since 1.5
     */
    public Map<String, Object> properties() {
        Map<String, Object> ret;
        if (properties == null) {
            ret = new HashMap<>(0);
        } else {
            // Copy so later internal changes cannot leak through the
            // unmodifiable view handed to the caller.
            ret = new HashMap<>(properties);
        }
        return Collections.unmodifiableMap(ret);
    }

    /**
     * Obtain the property value specified by the key. The concept of properties
     * is further explained in the {@link AudioFormat class description}.
     * <p>
     * If the specified property is not defined for a particular file format,
     * this method returns {@code null}.
     *
     * @param  key the key of the desired property
     * @return the value of the property with the specified key, or {@code null}
     *         if the property does not exist
     * @see #properties()
     * @since 1.5
     */
    public Object getProperty(String key) {
        if (properties == null) {
            return null;
        }
        return properties.get(key);
    }

    /**
     * Indicates whether this format matches the one specified. To match, two
     * formats must have the same encoding, and consistent values of the number
     * of channels, sample rate, sample size, frame rate, and frame size. The
     * values of the property are consistent if they are equal or the specified
     * format has the property value {@code AudioSystem.NOT_SPECIFIED}. The byte
     * order (big-endian or little-endian) must be the same if the sample size
     * is greater than one byte.
     *
     * @param  format format to test for match
     * @return {@code true} if this format matches the one specified,
     *         {@code false} otherwise
     */
    public boolean matches(AudioFormat format) {
        // Each field of the query format either matches exactly or is
        // NOT_SPECIFIED (a wildcard); endianness only matters for samples
        // wider than one byte.
        if (format.getEncoding().equals(getEncoding())
                && (format.getChannels() == AudioSystem.NOT_SPECIFIED
                    || format.getChannels() == getChannels())
                && (format.getSampleRate() == (float) AudioSystem.NOT_SPECIFIED
                    || format.getSampleRate() == getSampleRate())
                && (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED
                    || format.getSampleSizeInBits() == getSampleSizeInBits())
                && (format.getFrameRate() == (float) AudioSystem.NOT_SPECIFIED
                    || format.getFrameRate() == getFrameRate())
                && (format.getFrameSize() == AudioSystem.NOT_SPECIFIED
                    || format.getFrameSize() == getFrameSize())
                && (getSampleSizeInBits() <= 8
                    || format.isBigEndian() == isBigEndian())) {
            return true;
        }
        return false;
    }

    /**
     * Returns a string that describes the format, such as: "PCM SIGNED 22050 Hz
     * 16 bit mono big-endian". The contents of the string may vary between
     * implementations of Java Sound.
     *
     * @return a string that describes the format parameters
     */
    @Override
    public String toString() {
        String sEncoding = "";
        if (getEncoding() != null) {
            sEncoding = getEncoding().toString() + " ";
        }

        String sSampleRate;
        if (getSampleRate() == (float) AudioSystem.NOT_SPECIFIED) {
            sSampleRate = "unknown sample rate, ";
        } else {
            sSampleRate = "" + getSampleRate() + " Hz, ";
        }

        String sSampleSizeInBits;
        if (getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED) {
            sSampleSizeInBits = "unknown bits per sample, ";
        } else {
            sSampleSizeInBits = "" + getSampleSizeInBits() + " bit, ";
        }

        String sChannels;
        if (getChannels() == 1) {
            sChannels = "mono, ";
        } else if (getChannels() == 2) {
            sChannels = "stereo, ";
        } else if (getChannels() == AudioSystem.NOT_SPECIFIED) {
            sChannels = " unknown number of channels, ";
        } else {
            sChannels = "" + getChannels() + " channels, ";
        }

        String sFrameSize;
        if (getFrameSize() == AudioSystem.NOT_SPECIFIED) {
            sFrameSize = "unknown frame size, ";
        } else {
            sFrameSize = "" + getFrameSize() + " bytes/frame, ";
        }

        // The frame rate is only mentioned when it differs from the sample
        // rate (for PCM the two are identical and would be redundant).
        String sFrameRate = "";
        if (Math.abs(getSampleRate() - getFrameRate()) > 0.00001) {
            if (getFrameRate() == (float) AudioSystem.NOT_SPECIFIED) {
                sFrameRate = "unknown frame rate, ";
            } else {
                sFrameRate = getFrameRate() + " frames/second, ";
            }
        }

        // Constant-first equals avoids a NullPointerException when the
        // encoding is null (the sEncoding guard above already allows that).
        String sEndian = "";
        if ((Encoding.PCM_SIGNED.equals(getEncoding())
                || Encoding.PCM_UNSIGNED.equals(getEncoding()))
                && (getSampleSizeInBits() > 8
                    || getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED)) {
            if (isBigEndian()) {
                sEndian = "big-endian";
            } else {
                sEndian = "little-endian";
            }
        }

        return sEncoding
            + sSampleRate
            + sSampleSizeInBits
            + sChannels
            + sFrameSize
            + sFrameRate
            + sEndian;
    }

    /**
     * The {@code Encoding} class names the specific type of data representation
     * used for an audio stream. The encoding includes aspects of the sound
     * format other than the number of channels, sample rate, sample size, frame
     * rate, frame size, and byte order.
     * <p>
     * One ubiquitous type of audio encoding is pulse-code modulation (PCM),
     * which is simply a linear (proportional) representation of the sound
     * waveform. With PCM, the number stored in each sample is proportional to
     * the instantaneous amplitude of the sound pressure at that point in time.
     * The numbers may be signed or unsigned integers or floats. Besides PCM,
     * other encodings include mu-law and a-law, which are nonlinear mappings of
     * the sound amplitude that are often used for recording speech.
     * <p>
     * You can use a predefined encoding by referring to one of the static
     * objects created by this class, such as PCM_SIGNED or PCM_UNSIGNED.
     * Service providers can create new encodings, such as compressed audio
     * formats, and make these available through the {@link AudioSystem} class.
     * <p>
     * The {@code Encoding} class is static, so that all {@code AudioFormat}
     * objects that have the same encoding will refer to the same object (rather
     * than different instances of the same class). This allows matches to be
     * made by checking that two formats' encodings are equal.
     *
     * @author Kara Kytle
     * @see AudioFormat
     * @see javax.sound.sampled.spi.FormatConversionProvider
     * @since 1.3
     */
    public static class Encoding {

        /**
         * Specifies signed, linear PCM data.
         */
        public static final Encoding PCM_SIGNED = new Encoding("PCM_SIGNED");

        /**
         * Specifies unsigned, linear PCM data.
         */
        public static final Encoding PCM_UNSIGNED = new Encoding("PCM_UNSIGNED");

        /**
         * Specifies floating-point PCM data.
         *
         * @since 1.7
         */
        public static final Encoding PCM_FLOAT = new Encoding("PCM_FLOAT");

        /**
         * Specifies u-law encoded data.
         */
        public static final Encoding ULAW = new Encoding("ULAW");

        /**
         * Specifies a-law encoded data.
         */
        public static final Encoding ALAW = new Encoding("ALAW");

        /**
         * Encoding name.
         */
        private final String name;

        /**
         * Constructs a new encoding.
         *
         * @param  name the name of the new type of encoding
         */
        public Encoding(String name) {
            this.name = name;
        }

        /**
         * Indicates whether the specified object is equal to this encoding,
         * returning {@code true} if the object is an {@code Encoding} with the
         * same name.
         *
         * @param  obj the reference object with which to compare
         * @return {@code true} if the specified object is equal to this
         *         encoding; {@code false} otherwise
         */
        @Override
        public final boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof Encoding)) {
                // A non-Encoding object is never equal to an Encoding; this
                // preserves the symmetry requirement of the Object.equals
                // contract even when the encoding name is null.
                return false;
            }
            Encoding other = (Encoding) obj;
            return (name == null) ? (other.name == null) : name.equals(other.name);
        }

        /**
         * Returns a hash code value for this encoding, consistent with
         * {@link #equals(Object) equals}.
         *
         * @return the hash code of the encoding name, or 0 if the name is
         *         {@code null}
         */
        @Override
        public final int hashCode() {
            return (name == null) ? 0 : name.hashCode();
        }

        /**
         * Provides the {@code String} representation of the encoding. This
         * {@code String} is the same name that was passed to the constructor.
         * For the predefined encodings, the name is similar to the encoding's
         * variable (field) name. For example, {@code PCM_SIGNED.toString()}
         * returns the name "PCM_SIGNED".
         *
         * @return the encoding name
         */
        @Override
        public final String toString() {
            return name;
        }
    }
}