/*
 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.sound.sampled;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * {@code AudioFormat} is the class that specifies a particular arrangement of
 * data in a sound stream. By examining the information stored in the audio
 * format, you can discover how to interpret the bits in the binary sound data.
 * <p>
 * Every data line has an audio format associated with its data stream. The
 * audio format of a source (playback) data line indicates what kind of data the
 * data line expects to receive for output. For a target (capture) data line,
 * the audio format specifies the kind of the data that can be read from the
 * line.
 * Sound files also have audio formats, of course. The {@link AudioFileFormat}
 * class encapsulates an {@code AudioFormat} in addition to other, file-specific
 * information. Similarly, an {@link AudioInputStream} has an
 * {@code AudioFormat}.
 * <p>
 * The {@code AudioFormat} class accommodates a number of common sound-file
 * encoding techniques, including pulse-code modulation (PCM), mu-law encoding,
 * and a-law encoding. These encoding techniques are predefined, but service
 * providers can create new encoding types. The encoding that a specific format
 * uses is named by its {@code encoding} field.
 * <p>
 * In addition to the encoding, the audio format includes other properties that
 * further specify the exact arrangement of the data. These include the number
 * of channels, sample rate, sample size, byte order, frame rate, and frame
 * size. Sounds may have different numbers of audio channels: one for mono, two
 * for stereo. The sample rate measures how many "snapshots" (samples) of the
 * sound pressure are taken per second, per channel. (If the sound is stereo
 * rather than mono, two samples are actually measured at each instant of time:
 * one for the left channel, and another for the right channel; however, the
 * sample rate still measures the number per channel, so the rate is the same
 * regardless of the number of channels. This is the standard use of the term.)
 * The sample size indicates how many bits are used to store each snapshot; 8
 * and 16 are typical values. For 16-bit samples (or any other sample size
 * larger than a byte), byte order is important; the bytes in each sample are
 * arranged in either the "little-endian" or "big-endian" style. For encodings
 * like PCM, a frame consists of the set of samples for all channels at a given
 * point in time, and so the size of a frame (in bytes) is always equal to the
 * size of a sample (in bytes) times the number of channels. However, with some
 * other sorts of encodings a frame can contain a bundle of compressed data for
 * a whole series of samples, as well as additional, non-sample data. For such
 * encodings, the sample rate and sample size refer to the data after it is
 * decoded into PCM, and so they are completely different from the frame rate
 * and frame size.
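 * <p>
 * For example (an illustrative sketch; the variable name below is arbitrary),
 * a 16-bit, stereo, signed linear PCM format has a frame size of 4 bytes and a
 * frame rate equal to its sample rate:
 * <pre>{@code
 * AudioFormat cd = new AudioFormat(44100.0f, 16, 2, true, false);
 * int frameSize = cd.getFrameSize();    // 4: 2 bytes per sample times 2 channels
 * float frameRate = cd.getFrameRate();  // 44100.0: equal to the sample rate for PCM
 * }</pre>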
 * <p>
 * An {@code AudioFormat} object can include a set of properties. A property is
 * a pair of key and value: the key is of type {@code String}, the associated
 * property value is an arbitrary object. Properties specify additional format
 * specifications, like the bit rate for compressed formats. Properties are
 * mainly used as a means to transport additional information of the audio
 * format to and from the service providers. Therefore, properties are ignored
 * in the {@link #matches(AudioFormat)} method. However, methods which rely on
 * the installed service providers, like
 * {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
 * isConversionSupported} may consider properties, depending on the respective
 * service provider implementation.
 * <p>
 * The following table lists some common properties which service providers
 * should use, if applicable:
 *
 * <table class="striped">
 * <caption>Audio Format Properties</caption>
 * <thead>
 *  <tr>
 *   <th>Property key</th>
 *   <th>Value type</th>
 *   <th>Description</th>
 *  </tr>
 * </thead>
 * <tbody>
 *  <tr>
 *   <td>&quot;bitrate&quot;</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>average bit rate in bits per second</td>
 *  </tr>
 *  <tr>
 *   <td>&quot;vbr&quot;</td>
 *   <td>{@link java.lang.Boolean Boolean}</td>
 *   <td>{@code true}, if the file is encoded in variable bit
 *       rate (VBR)</td>
 *  </tr>
 *  <tr>
 *   <td>&quot;quality&quot;</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>encoding/conversion quality, 1..100</td>
 *  </tr>
 * </tbody>
 * </table>
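 * <p>
 * A brief, hypothetical sketch of attaching such a property when constructing a
 * format and reading it back (the encoding name and the concrete values are
 * chosen only for illustration):
 * <pre>{@code
 * Map<String, Object> props = new HashMap<>();
 * props.put("bitrate", 128000);  // average bit rate in bits per second
 * AudioFormat fmt = new AudioFormat(new AudioFormat.Encoding("MY_COMPRESSED"),
 *         44100.0f, 16, 2, AudioSystem.NOT_SPECIFIED,
 *         AudioSystem.NOT_SPECIFIED, false, props);
 * Object bitrate = fmt.getProperty("bitrate");  // Integer.valueOf(128000)
 * }</pre>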
 * <p>
 * Vendors of service providers (plugins) are encouraged to seek information
 * about other already established properties in third party plugins, and follow
 * the same conventions.
 *
 * @author Kara Kytle
 * @author Florian Bomers
 * @see DataLine#getFormat
 * @see AudioInputStream#getFormat
 * @see AudioFileFormat
 * @see javax.sound.sampled.spi.FormatConversionProvider
 * @since 1.3
 */
public class AudioFormat {

    /**
     * The audio encoding technique used by this format.
     */
    protected Encoding encoding;

    /**
     * The number of samples played or recorded per second, for sounds that have
     * this format.
     */
    protected float sampleRate;

    /**
     * The number of bits in each sample of a sound that has this format.
     */
    protected int sampleSizeInBits;

    /**
     * The number of audio channels in this format (1 for mono, 2 for stereo).
     */
    protected int channels;

    /**
     * The number of bytes in each frame of a sound that has this format.
     */
    protected int frameSize;

    /**
     * The number of frames played or recorded per second, for sounds that have
     * this format.
     */
    protected float frameRate;

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * order.
     */
    protected boolean bigEndian;

    /**
     * The set of properties.
     */
    private HashMap<String, Object> properties;

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
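     * <p>
     * As an illustrative sketch (the variable name is arbitrary), an 8 kHz,
     * 8-bit, mono mu-law telephony format, in which each frame holds a single
     * one-byte sample, could be described as:
     * <pre>{@code
     * AudioFormat ulaw = new AudioFormat(AudioFormat.Encoding.ULAW,
     *         8000.0f, 8, 1, 1, 8000.0f, false);
     * }</pre>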
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo,
     *         and so on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(Encoding encoding, float sampleRate, int sampleSizeInBits,
                       int channels, int frameSize, float frameRate, boolean bigEndian) {

        this.encoding = encoding;
        this.sampleRate = sampleRate;
        this.sampleSizeInBits = sampleSizeInBits;
        this.channels = channels;
        this.frameSize = frameSize;
        this.frameRate = frameRate;
        this.bigEndian = bigEndian;
        this.properties = null;
    }

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means little-endian)
     * @param  properties a {@code Map<String, Object>} object containing format
     *         properties
     * @since 1.5
     */
    public AudioFormat(Encoding encoding, float sampleRate,
                       int sampleSizeInBits, int channels,
                       int frameSize, float frameRate,
                       boolean bigEndian, Map<String, Object> properties) {
        this(encoding, sampleRate, sampleSizeInBits, channels,
             frameSize, frameRate, bigEndian);
        this.properties = new HashMap<>(properties);
    }

    /**
     * Constructs an {@code AudioFormat} with a linear PCM encoding and the
     * given parameters. The frame size is set to the number of bytes required
     * to contain one sample from each channel, and the frame rate is set to the
     * sample rate.
     *
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  signed indicates whether the data is signed or unsigned
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(float sampleRate, int sampleSizeInBits,
                       int channels, boolean signed, boolean bigEndian) {

        this((signed ? Encoding.PCM_SIGNED : Encoding.PCM_UNSIGNED),
             sampleRate,
             sampleSizeInBits,
             channels,
             (channels == AudioSystem.NOT_SPECIFIED || sampleSizeInBits == AudioSystem.NOT_SPECIFIED) ?
             AudioSystem.NOT_SPECIFIED :
             ((sampleSizeInBits + 7) / 8) * channels,
             sampleRate,
             bigEndian);
    }

    /**
     * Obtains the type of encoding for sounds in this format.
     *
     * @return the encoding type
     * @see Encoding#PCM_SIGNED
     * @see Encoding#PCM_UNSIGNED
     * @see Encoding#ULAW
     * @see Encoding#ALAW
     */
    public Encoding getEncoding() {

        return encoding;
    }

    /**
     * Obtains the sample rate. For compressed formats, the return value is the
     * sample rate of the uncompressed audio data. When this AudioFormat is used
     * for queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample rate is not defined for this audio format.
     *
     * @return the number of samples per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getSampleRate() {

        return sampleRate;
    }

    /**
     * Obtains the size of a sample. For compressed formats, the return value is
     * the sample size of the uncompressed audio data. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample size is not defined for this audio format.
     *
     * @return the number of bits in each sample, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameSize()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getSampleSizeInBits() {

        return sampleSizeInBits;
    }

    /**
     * Obtains the number of channels. When this AudioFormat is used for queries
     * (e.g. {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
     * AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a return
     * value of {@code AudioSystem.NOT_SPECIFIED} means that any (positive)
     * number of channels is acceptable.
     *
     * @return The number of channels (1 for mono, 2 for stereo, etc.), or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getChannels() {

        return channels;
    }

    /**
     * Obtains the frame size in bytes. When this AudioFormat is used for
     * queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame size is not defined for this audio format.
     *
     * @return the number of bytes per frame, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleSizeInBits()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getFrameSize() {

        return frameSize;
    }

    /**
     * Obtains the frame rate in frames per second. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame rate is not defined for this audio format.
     *
     * @return the number of frames per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getFrameRate() {

        return frameRate;
    }

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * byte order. If the sample size is not more than one byte, the return
     * value is irrelevant.
     *
     * @return {@code true} if the data is stored in big-endian byte order,
     *         {@code false} if little-endian
     */
    public boolean isBigEndian() {

        return bigEndian;
    }

    /**
     * Obtain an unmodifiable map of properties. The concept of properties is
     * further explained in the {@link AudioFormat class description}.
     *
     * @return a {@code Map<String, Object>} object containing all properties.
     *         If no properties are recognized, an empty map is returned.
     * @see #getProperty(String)
     * @since 1.5
     */
    @SuppressWarnings("unchecked") // Cast of result of clone.
    public Map<String,Object> properties() {
        Map<String,Object> ret;
        if (properties == null) {
            ret = new HashMap<>(0);
        } else {
            ret = (Map<String,Object>) (properties.clone());
        }
        return Collections.unmodifiableMap(ret);
    }

    /**
     * Obtain the property value specified by the key. The concept of properties
     * is further explained in the {@link AudioFormat class description}.
     * <p>
     * If the specified property is not defined for a particular format, this
     * method returns {@code null}.
     *
     * @param  key the key of the desired property
     * @return the value of the property with the specified key, or {@code null}
     *         if the property does not exist
     * @see #properties()
     * @since 1.5
     */
    public Object getProperty(String key) {
        if (properties == null) {
            return null;
        }
        return properties.get(key);
    }

    /**
     * Indicates whether this format matches the one specified. To match, two
     * formats must have the same encoding, and consistent values of the number
     * of channels, sample rate, sample size, frame rate, and frame size. The
     * values of a property are consistent if they are equal, or if the
     * specified format has the value {@code AudioSystem.NOT_SPECIFIED} for that
     * property. The byte order (big-endian or little-endian) must be the same
     * if the sample size is greater than one byte.
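     * <p>
     * For instance (a hypothetical sketch; the variable names are arbitrary), a
     * fully specified stereo PCM format matches a template that leaves the
     * sample rate and frame rate unspecified:
     * <pre>{@code
     * AudioFormat concrete = new AudioFormat(48000.0f, 16, 2, true, false);
     * AudioFormat template = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
     *         AudioSystem.NOT_SPECIFIED, 16, 2, 4,
     *         AudioSystem.NOT_SPECIFIED, false);
     * boolean matched = concrete.matches(template);  // true
     * }</pre>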
     *
     * @param  format format to test for match
     * @return {@code true} if this format matches the one specified,
     *         {@code false} otherwise
     */
    public boolean matches(AudioFormat format) {
        if (format.getEncoding().equals(getEncoding())
                && (format.getChannels() == AudioSystem.NOT_SPECIFIED
                    || format.getChannels() == getChannels())
                && (format.getSampleRate() == (float)AudioSystem.NOT_SPECIFIED
                    || format.getSampleRate() == getSampleRate())
                && (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED
                    || format.getSampleSizeInBits() == getSampleSizeInBits())
                && (format.getFrameRate() == (float)AudioSystem.NOT_SPECIFIED
                    || format.getFrameRate() == getFrameRate())
                && (format.getFrameSize() == AudioSystem.NOT_SPECIFIED
                    || format.getFrameSize() == getFrameSize())
                && (getSampleSizeInBits() <= 8
                    || format.isBigEndian() == isBigEndian())) {
            return true;
        }
        return false;
    }

    /**
     * Returns a string that describes the format, such as: "PCM SIGNED 22050 Hz
     * 16 bit mono big-endian". The contents of the string may vary between
     * implementations of Java Sound.
     *
     * @return a string that describes the format parameters
     */
    @Override
    public String toString() {
        String sEncoding = "";
        if (getEncoding() != null) {
            sEncoding = getEncoding().toString() + " ";
        }

        String sSampleRate;
        if (getSampleRate() == (float) AudioSystem.NOT_SPECIFIED) {
            sSampleRate = "unknown sample rate, ";
        } else {
            sSampleRate = "" + getSampleRate() + " Hz, ";
        }

        String sSampleSizeInBits;
        if (getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED) {
            sSampleSizeInBits = "unknown bits per sample, ";
        } else {
            sSampleSizeInBits = "" + getSampleSizeInBits() + " bit, ";
        }

        String sChannels;
        if (getChannels() == 1) {
            sChannels = "mono, ";
        } else if (getChannels() == 2) {
            sChannels = "stereo, ";
        } else if (getChannels() == AudioSystem.NOT_SPECIFIED) {
            sChannels = "unknown number of channels, ";
        } else {
            sChannels = "" + getChannels() + " channels, ";
        }

        String sFrameSize;
        if (getFrameSize() == AudioSystem.NOT_SPECIFIED) {
            sFrameSize = "unknown frame size, ";
        } else {
            sFrameSize = "" + getFrameSize() + " bytes/frame, ";
        }

        String sFrameRate = "";
        if (Math.abs(getSampleRate() - getFrameRate()) > 0.00001) {
            if (getFrameRate() == (float) AudioSystem.NOT_SPECIFIED) {
                sFrameRate = "unknown frame rate, ";
            } else {
                sFrameRate = getFrameRate() + " frames/second, ";
            }
        }

        String sEndian = "";
        if ((getEncoding().equals(Encoding.PCM_SIGNED)
             || getEncoding().equals(Encoding.PCM_UNSIGNED))
            && ((getSampleSizeInBits() > 8)
                || (getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED))) {
            if (isBigEndian()) {
                sEndian = "big-endian";
            } else {
                sEndian = "little-endian";
            }
        }

        return sEncoding
            + sSampleRate
            + sSampleSizeInBits
            + sChannels
            + sFrameSize
            + sFrameRate
            + sEndian;

    }

    /**
     * The {@code Encoding} class names the specific type of data representation
     * used for an audio stream. The encoding includes aspects of the sound
     * format other than the number of channels, sample rate, sample size, frame
     * rate, frame size, and byte order.
     * <p>
     * One ubiquitous type of audio encoding is pulse-code modulation (PCM),
     * which is simply a linear (proportional) representation of the sound
     * waveform. With PCM, the number stored in each sample is proportional to
     * the instantaneous amplitude of the sound pressure at that point in time.
     * The numbers may be signed or unsigned integers or floats. Besides PCM,
     * other encodings include mu-law and a-law, which are nonlinear mappings of
     * the sound amplitude that are often used for recording speech.
     * <p>
     * You can use a predefined encoding by referring to one of the static
     * objects created by this class, such as PCM_SIGNED or PCM_UNSIGNED.
     * Service providers can create new encodings, such as compressed audio
     * formats, and make these available through the {@link AudioSystem} class.
     * <p>
     * The {@code Encoding} class is static, so that all {@code AudioFormat}
     * objects that have the same encoding will refer to the same object (rather
     * than different instances of the same class). This allows matches to be
     * made by checking that two formats' encodings are equal.
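     * <p>
     * As a brief, hypothetical sketch (the encoding name below is made up for
     * illustration), a service provider could define and compare a custom
     * encoding like this:
     * <pre>{@code
     * AudioFormat.Encoding myCodec = new AudioFormat.Encoding("MY_CODEC");
     * boolean same = myCodec.equals(new AudioFormat.Encoding("MY_CODEC"));  // true: equality is by name
     * }</pre>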
     *
     * @author Kara Kytle
     * @see AudioFormat
     * @see javax.sound.sampled.spi.FormatConversionProvider
     * @since 1.3
     */
    public static class Encoding {

        /**
         * Specifies signed, linear PCM data.
         */
        public static final Encoding PCM_SIGNED = new Encoding("PCM_SIGNED");

        /**
         * Specifies unsigned, linear PCM data.
         */
        public static final Encoding PCM_UNSIGNED = new Encoding("PCM_UNSIGNED");

        /**
         * Specifies floating-point PCM data.
         *
         * @since 1.7
         */
        public static final Encoding PCM_FLOAT = new Encoding("PCM_FLOAT");

        /**
         * Specifies u-law encoded data.
         */
        public static final Encoding ULAW = new Encoding("ULAW");

        /**
         * Specifies a-law encoded data.
         */
        public static final Encoding ALAW = new Encoding("ALAW");

        /**
         * Encoding name.
         */
        private final String name;

        /**
         * Constructs a new encoding.
         *
         * @param  name the name of the new type of encoding
         */
        public Encoding(final String name) {
            this.name = name;
        }

        /**
         * Indicates whether the specified object is equal to this encoding,
         * returning {@code true} if the objects are equal.
         *
         * @param  obj the reference object with which to compare
         * @return {@code true} if the specified object is equal to this
         *         encoding; {@code false} otherwise
         */
        @Override
        public final boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof Encoding)) {
                return false;
            }
            return Objects.equals(name, ((Encoding) obj).name);
        }

        /**
         * Returns a hash code value for this encoding.
         *
         * @return a hash code value for this encoding
         */
        @Override
        public final int hashCode() {
            return name != null ? name.hashCode() : 0;
        }

        /**
         * Provides the {@code String} representation of the encoding. This
         * {@code String} is the same name that was passed to the constructor.
         * For the predefined encodings, the name is similar to the encoding's
         * variable (field) name. For example, {@code PCM_SIGNED.toString()}
         * returns the name "PCM_SIGNED".
         *
         * @return the encoding name
         */
        @Override
        public final String toString() {
            return name;
        }
    }
}