/*
 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.sound.sampled;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * {@code AudioFormat} is the class that specifies a particular arrangement of
 * data in a sound stream. By examining the information stored in the audio
 * format, you can discover how to interpret the bits in the binary sound data.
 * <p>
 * Every data line has an audio format associated with its data stream. The
 * audio format of a source (playback) data line indicates what kind of data
 * the data line expects to receive for output. For a target (capture) data
 * line, the audio format specifies the kind of data that can be read from the
 * line. Sound files also have audio formats, of course. The
 * {@link AudioFileFormat} class encapsulates an {@code AudioFormat} in
 * addition to other, file-specific information. Similarly, an
 * {@link AudioInputStream} has an {@code AudioFormat}.
 * <p>
 * The {@code AudioFormat} class accommodates a number of common sound-file
 * encoding techniques, including pulse-code modulation (PCM), mu-law encoding,
 * and a-law encoding. These encoding techniques are predefined, but service
 * providers can create new encoding types. The encoding that a specific format
 * uses is named by its {@code encoding} field.
 * <p>
 * In addition to the encoding, the audio format includes other properties that
 * further specify the exact arrangement of the data. These include the number
 * of channels, sample rate, sample size, byte order, frame rate, and frame
 * size. Sounds may have different numbers of audio channels: one for mono, two
 * for stereo. The sample rate measures how many "snapshots" (samples) of the
 * sound pressure are taken per second, per channel. (If the sound is stereo
 * rather than mono, two samples are actually measured at each instant of time:
 * one for the left channel, and another for the right channel; however, the
 * sample rate still measures the number per channel, so the rate is the same
 * regardless of the number of channels. This is the standard use of the term.)
 * The sample size indicates how many bits are used to store each snapshot; 8
 * and 16 are typical values. For 16-bit samples (or any other sample size
 * larger than a byte), byte order is important; the bytes in each sample are
 * arranged in either the "little-endian" or "big-endian" style. For encodings
 * like PCM, a frame consists of the set of samples for all channels at a given
 * point in time, and so the size of a frame (in bytes) is always equal to the
 * size of a sample (in bytes) times the number of channels. However, with some
 * other sorts of encodings a frame can contain a bundle of compressed data for
 * a whole series of samples, as well as additional, non-sample data. For such
 * encodings, the sample rate and sample size refer to the data after it is
 * decoded into PCM, and so they are completely different from the frame rate
 * and frame size.
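 * <p>
 * For example (the values here are purely illustrative), 16-bit stereo PCM
 * sampled at 44100 Hz has a frame size of 2 bytes per sample x 2 channels =
 * 4 bytes per frame, and a frame rate equal to the sample rate. A sketch of
 * constructing such a format:
 * <pre>{@code
 * AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
 *         44100f, 16, 2, 4, 44100f, false);
 * }</pre>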
 * <p>
 * An {@code AudioFormat} object can include a set of properties. A property is
 * a pair of key and value: the key is of type {@code String}, the associated
 * property value is an arbitrary object. Properties specify additional format
 * specifications, like the bit rate for compressed formats. Properties are
 * mainly used as a means to transport additional information about the audio
 * format to and from the service providers. Therefore, properties are ignored
 * in the {@link #matches(AudioFormat)} method. However, methods which rely on
 * the installed service providers, like
 * {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
 * isConversionSupported} may consider properties, depending on the respective
 * service provider implementation.
 * <p>
 * The following table lists some common properties which service providers
 * should use, if applicable:
 *
 * <table border=0>
 *  <caption>Audio Format Properties</caption>
 *  <tr>
 *   <th>Property key</th>
 *   <th>Value type</th>
 *   <th>Description</th>
 *  </tr>
 *  <tr>
 *   <td>&quot;bitrate&quot;</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>average bit rate in bits per second</td>
 *  </tr>
 *  <tr>
 *   <td>&quot;vbr&quot;</td>
 *   <td>{@link java.lang.Boolean Boolean}</td>
 *   <td>{@code true} if the file is encoded in variable bit
 *       rate (VBR)</td>
 *  </tr>
 *  <tr>
 *   <td>&quot;quality&quot;</td>
 *   <td>{@link java.lang.Integer Integer}</td>
 *   <td>encoding/conversion quality, 1..100</td>
 *  </tr>
 * </table>
 * <p>
 * Vendors of service providers (plugins) are encouraged to seek information
 * about other already established properties in third party plugins, and follow
 * the same conventions.
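 * <p>
 * For illustration only (the encoding name {@code "MY_ENCODING"} and the
 * property values below are hypothetical), a service provider reading
 * compressed data might attach such properties when constructing a format:
 * <pre>{@code
 * Map<String, Object> props = new HashMap<>();
 * props.put("bitrate", 128000);
 * props.put("vbr", true);
 * AudioFormat format = new AudioFormat(new AudioFormat.Encoding("MY_ENCODING"),
 *         44100f, 16, 2, AudioSystem.NOT_SPECIFIED,
 *         AudioSystem.NOT_SPECIFIED, false, props);
 * Object bitRate = format.getProperty("bitrate");   // Integer 128000
 * }</pre>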
 *
 * @author Kara Kytle
 * @author Florian Bomers
 * @see DataLine#getFormat
 * @see AudioInputStream#getFormat
 * @see AudioFileFormat
 * @see javax.sound.sampled.spi.FormatConversionProvider
 * @since 1.3
 */
public class AudioFormat {

    /**
     * The audio encoding technique used by this format.
     */
    protected Encoding encoding;

    /**
     * The number of samples played or recorded per second, for sounds that have
     * this format.
     */
    protected float sampleRate;

    /**
     * The number of bits in each sample of a sound that has this format.
     */
    protected int sampleSizeInBits;

    /**
     * The number of audio channels in this format (1 for mono, 2 for stereo).
     */
    protected int channels;

    /**
     * The number of bytes in each frame of a sound that has this format.
     */
    protected int frameSize;

    /**
     * The number of frames played or recorded per second, for sounds that have
     * this format.
     */
    protected float frameRate;

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * order.
     */
    protected boolean bigEndian;

    /**
     * The set of properties.
     */
    private HashMap<String, Object> properties;

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo,
     *         and so on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(Encoding encoding, float sampleRate, int sampleSizeInBits,
                       int channels, int frameSize, float frameRate, boolean bigEndian) {

        this.encoding = encoding;
        this.sampleRate = sampleRate;
        this.sampleSizeInBits = sampleSizeInBits;
        this.channels = channels;
        this.frameSize = frameSize;
        this.frameRate = frameRate;
        this.bigEndian = bigEndian;
        this.properties = null;
    }

    /**
     * Constructs an {@code AudioFormat} with the given parameters. The encoding
     * specifies the convention used to represent the data. The other parameters
     * are further explained in the {@link AudioFormat class description}.
     *
     * @param  encoding the audio encoding technique
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  frameSize the number of bytes in each frame
     * @param  frameRate the number of frames per second
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means little-endian)
     * @param  properties a {@code Map<String, Object>} object containing format
     *         properties
     * @since 1.5
     */
    public AudioFormat(Encoding encoding, float sampleRate,
                       int sampleSizeInBits, int channels,
                       int frameSize, float frameRate,
                       boolean bigEndian, Map<String, Object> properties) {
        this(encoding, sampleRate, sampleSizeInBits, channels,
             frameSize, frameRate, bigEndian);
        this.properties = new HashMap<>(properties);
    }

    /**
     * Constructs an {@code AudioFormat} with a linear PCM encoding and the
     * given parameters. The frame size is set to the number of bytes required
     * to contain one sample from each channel, and the frame rate is set to the
     * sample rate.
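     * <p>
     * For example (illustrative values only), the following two formats
     * describe the same 16-bit stereo signed PCM data at 44100 Hz, so
     * {@code a.matches(b)} and {@code b.matches(a)} both return {@code true}:
     * <pre>{@code
     * AudioFormat a = new AudioFormat(44100f, 16, 2, true, false);
     * AudioFormat b = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
     *         44100f, 16, 2, 4, 44100f, false);
     * }</pre>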
     *
     * @param  sampleRate the number of samples per second
     * @param  sampleSizeInBits the number of bits in each sample
     * @param  channels the number of channels (1 for mono, 2 for stereo, and so
     *         on)
     * @param  signed indicates whether the data is signed or unsigned
     * @param  bigEndian indicates whether the data for a single sample is
     *         stored in big-endian byte order ({@code false} means
     *         little-endian)
     */
    public AudioFormat(float sampleRate, int sampleSizeInBits,
                       int channels, boolean signed, boolean bigEndian) {

        this((signed ? Encoding.PCM_SIGNED : Encoding.PCM_UNSIGNED),
             sampleRate,
             sampleSizeInBits,
             channels,
             (channels == AudioSystem.NOT_SPECIFIED || sampleSizeInBits == AudioSystem.NOT_SPECIFIED)
                 ? AudioSystem.NOT_SPECIFIED
                 : ((sampleSizeInBits + 7) / 8) * channels,
             sampleRate,
             bigEndian);
    }

    /**
     * Obtains the type of encoding for sounds in this format.
     *
     * @return the encoding type
     * @see Encoding#PCM_SIGNED
     * @see Encoding#PCM_UNSIGNED
     * @see Encoding#ULAW
     * @see Encoding#ALAW
     */
    public Encoding getEncoding() {

        return encoding;
    }

    /**
     * Obtains the sample rate. For compressed formats, the return value is the
     * sample rate of the uncompressed audio data. When this AudioFormat is used
     * for queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample rate is not defined for this audio format.
     *
     * @return the number of samples per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getSampleRate() {

        return sampleRate;
    }

    /**
     * Obtains the size of a sample. For compressed formats, the return value is
     * the sample size of the uncompressed audio data. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a sample size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any sample size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * sample size is not defined for this audio format.
     *
     * @return the number of bits in each sample, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getFrameSize()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getSampleSizeInBits() {

        return sampleSizeInBits;
    }

    /**
     * Obtains the number of channels. When this AudioFormat is used for queries
     * (e.g. {@link AudioSystem#isConversionSupported(AudioFormat, AudioFormat)
     * AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a return
     * value of {@code AudioSystem.NOT_SPECIFIED} means that any (positive)
     * number of channels is acceptable.
     *
     * @return the number of channels (1 for mono, 2 for stereo, etc.), or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getChannels() {

        return channels;
    }

    /**
     * Obtains the frame size in bytes. When this AudioFormat is used for
     * queries (e.g. {@link AudioSystem#isConversionSupported(AudioFormat,
     * AudioFormat) AudioSystem.isConversionSupported}) or capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame size
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame size is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame size is not defined for this audio format.
     *
     * @return the number of bytes per frame, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleSizeInBits()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public int getFrameSize() {

        return frameSize;
    }

    /**
     * Obtains the frame rate in frames per second. When this AudioFormat is
     * used for queries (e.g. {@link AudioSystem#isConversionSupported(
     * AudioFormat,AudioFormat) AudioSystem.isConversionSupported}) or
     * capabilities (e.g.
     * {@link DataLine.Info#getFormats DataLine.Info.getFormats}), a frame rate
     * of {@code AudioSystem.NOT_SPECIFIED} means that any frame rate is
     * acceptable. {@code AudioSystem.NOT_SPECIFIED} is also returned when the
     * frame rate is not defined for this audio format.
     *
     * @return the number of frames per second, or
     *         {@code AudioSystem.NOT_SPECIFIED}
     * @see #getSampleRate()
     * @see AudioSystem#NOT_SPECIFIED
     */
    public float getFrameRate() {

        return frameRate;
    }

    /**
     * Indicates whether the audio data is stored in big-endian or little-endian
     * byte order. If the sample size is not more than one byte, the return
     * value is irrelevant.
     *
     * @return {@code true} if the data is stored in big-endian byte order,
     *         {@code false} if little-endian
     */
    public boolean isBigEndian() {

        return bigEndian;
    }

    /**
     * Obtain an unmodifiable map of properties. The concept of properties is
     * further explained in the {@link AudioFormat class description}.
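     * <p>
     * For illustration (assuming some {@code AudioFormat} instance
     * {@code format}), the returned map cannot be modified; attempts to do so
     * fail at run time:
     * <pre>{@code
     * Map<String, Object> p = format.properties();
     * p.put("bitrate", 96000);   // throws UnsupportedOperationException
     * }</pre>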
     *
     * @return a {@code Map<String, Object>} object containing all properties.
     *         If no properties are recognized, an empty map is returned.
     * @see #getProperty(String)
     * @since 1.5
     */
    @SuppressWarnings("unchecked") // Cast of result of clone.
    public Map<String,Object> properties() {
        Map<String,Object> ret;
        if (properties == null) {
            ret = new HashMap<>(0);
        } else {
            ret = (Map<String,Object>) (properties.clone());
        }
        return Collections.unmodifiableMap(ret);
    }

    /**
     * Obtain the property value specified by the key. The concept of properties
     * is further explained in the {@link AudioFormat class description}.
     * <p>
     * If the specified property is not defined for a particular format, this
     * method returns {@code null}.
     *
     * @param  key the key of the desired property
     * @return the value of the property with the specified key, or {@code null}
     *         if the property does not exist
     * @see #properties()
     * @since 1.5
     */
    public Object getProperty(String key) {
        if (properties == null) {
            return null;
        }
        return properties.get(key);
    }

    /**
     * Indicates whether this format matches the one specified. To match, two
     * formats must have the same encoding, and consistent values of the number
     * of channels, sample rate, sample size, frame rate, and frame size. The
     * values of each of these properties are consistent if they are equal, or
     * if the specified format has the value {@code AudioSystem.NOT_SPECIFIED}
     * for that property. The byte order (big-endian or little-endian) must be
     * the same if the sample size is greater than one byte.
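     * <p>
     * For example (illustrative values only), a fully specified format matches
     * a "template" format in which some fields are left unspecified:
     * <pre>{@code
     * AudioFormat concrete = new AudioFormat(44100f, 16, 2, true, false);
     * AudioFormat template = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
     *         AudioSystem.NOT_SPECIFIED, 16, 2,
     *         AudioSystem.NOT_SPECIFIED, AudioSystem.NOT_SPECIFIED, false);
     * boolean matched = concrete.matches(template);   // true
     * }</pre>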
     *
     * @param  format format to test for match
     * @return {@code true} if this format matches the one specified,
     *         {@code false} otherwise
     */
    public boolean matches(AudioFormat format) {
        return format.getEncoding().equals(getEncoding())
                && (format.getChannels() == AudioSystem.NOT_SPECIFIED
                    || format.getChannels() == getChannels())
                && (format.getSampleRate() == (float) AudioSystem.NOT_SPECIFIED
                    || format.getSampleRate() == getSampleRate())
                && (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED
                    || format.getSampleSizeInBits() == getSampleSizeInBits())
                && (format.getFrameRate() == (float) AudioSystem.NOT_SPECIFIED
                    || format.getFrameRate() == getFrameRate())
                && (format.getFrameSize() == AudioSystem.NOT_SPECIFIED
                    || format.getFrameSize() == getFrameSize())
                && (getSampleSizeInBits() <= 8
                    || format.isBigEndian() == isBigEndian());
    }

    /**
     * Returns a string that describes the format, such as: "PCM SIGNED 22050 Hz
     * 16 bit mono big-endian". The contents of the string may vary between
     * implementations of Java Sound.
     *
     * @return a string that describes the format parameters
     */
    @Override
    public String toString() {
        String sEncoding = "";
        if (getEncoding() != null) {
            sEncoding = getEncoding().toString() + " ";
        }

        String sSampleRate;
        if (getSampleRate() == (float) AudioSystem.NOT_SPECIFIED) {
            sSampleRate = "unknown sample rate, ";
        } else {
            sSampleRate = "" + getSampleRate() + " Hz, ";
        }

        String sSampleSizeInBits;
        if (getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED) {
            sSampleSizeInBits = "unknown bits per sample, ";
        } else {
            sSampleSizeInBits = "" + getSampleSizeInBits() + " bit, ";
        }

        String sChannels;
        if (getChannels() == 1) {
            sChannels = "mono, ";
        } else if (getChannels() == 2) {
            sChannels = "stereo, ";
        } else if (getChannels() == AudioSystem.NOT_SPECIFIED) {
            sChannels = "unknown number of channels, ";
        } else {
            sChannels = "" + getChannels() + " channels, ";
        }

        String sFrameSize;
        if (getFrameSize() == AudioSystem.NOT_SPECIFIED) {
            sFrameSize = "unknown frame size, ";
        } else {
            sFrameSize = "" + getFrameSize() + " bytes/frame, ";
        }

        String sFrameRate = "";
        if (Math.abs(getSampleRate() - getFrameRate()) > 0.00001) {
            if (getFrameRate() == (float) AudioSystem.NOT_SPECIFIED) {
                sFrameRate = "unknown frame rate, ";
            } else {
                sFrameRate = getFrameRate() + " frames/second, ";
            }
        }

        String sEndian = "";
        if (getEncoding() != null
            && (getEncoding().equals(Encoding.PCM_SIGNED)
                || getEncoding().equals(Encoding.PCM_UNSIGNED))
            && ((getSampleSizeInBits() > 8)
                || (getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED))) {
            if (isBigEndian()) {
                sEndian = "big-endian";
            } else {
                sEndian = "little-endian";
            }
        }

        return sEncoding
            + sSampleRate
            + sSampleSizeInBits
            + sChannels
            + sFrameSize
            + sFrameRate
            + sEndian;
    }

    /**
     * The {@code Encoding} class names the specific type of data representation
     * used for an audio stream. The encoding includes aspects of the sound
     * format other than the number of channels, sample rate, sample size, frame
     * rate, frame size, and byte order.
     * <p>
     * One ubiquitous type of audio encoding is pulse-code modulation (PCM),
     * which is simply a linear (proportional) representation of the sound
     * waveform. With PCM, the number stored in each sample is proportional to
     * the instantaneous amplitude of the sound pressure at that point in time.
     * The numbers may be signed or unsigned integers or floats. Besides PCM,
     * other encodings include mu-law and a-law, which are nonlinear mappings of
     * the sound amplitude that are often used for recording speech.
     * <p>
     * You can use a predefined encoding by referring to one of the static
     * objects created by this class, such as PCM_SIGNED or PCM_UNSIGNED.
     * Service providers can create new encodings, such as compressed audio
     * formats, and make these available through the {@link AudioSystem} class.
     * <p>
     * The {@code Encoding} class is static, so that all {@code AudioFormat}
     * objects that have the same encoding will refer to the same object (rather
     * than different instances of the same class). This allows matches to be
     * made by checking that two formats' encodings are equal.
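     * <p>
     * For illustration only (the encoding name below is hypothetical), a
     * service provider could define its own encoding; two {@code Encoding}
     * instances created with the same name compare equal:
     * <pre>{@code
     * AudioFormat.Encoding myCodec = new AudioFormat.Encoding("MY_CODEC");
     * boolean same = myCodec.equals(new AudioFormat.Encoding("MY_CODEC")); // true
     * }</pre>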
     *
     * @author Kara Kytle
     * @see AudioFormat
     * @see javax.sound.sampled.spi.FormatConversionProvider
     * @since 1.3
     */
    public static class Encoding {

        /**
         * Specifies signed, linear PCM data.
         */
        public static final Encoding PCM_SIGNED = new Encoding("PCM_SIGNED");

        /**
         * Specifies unsigned, linear PCM data.
         */
        public static final Encoding PCM_UNSIGNED = new Encoding("PCM_UNSIGNED");

        /**
         * Specifies floating-point PCM data.
         *
         * @since 1.7
         */
        public static final Encoding PCM_FLOAT = new Encoding("PCM_FLOAT");

        /**
         * Specifies u-law encoded data.
         */
        public static final Encoding ULAW = new Encoding("ULAW");

        /**
         * Specifies a-law encoded data.
         */
        public static final Encoding ALAW = new Encoding("ALAW");

        /**
         * Encoding name.
         */
        private final String name;

        /**
         * Constructs a new encoding.
         *
         * @param  name the name of the new type of encoding
         */
        public Encoding(final String name) {
            this.name = name;
        }

        /**
         * Indicates whether the specified object is equal to this encoding,
         * returning {@code true} if the objects are equal.
         *
         * @param  obj the reference object with which to compare
         * @return {@code true} if the specified object is equal to this
         *         encoding; {@code false} otherwise
         */
        @Override
        public final boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof Encoding)) {
                return false;
            }
            return Objects.equals(name, ((Encoding) obj).name);
        }

        /**
         * Returns a hash code value for this encoding.
         *
         * @return a hash code value for this encoding
         */
        @Override
        public final int hashCode() {
            return name != null ? name.hashCode() : 0;
        }

        /**
         * Provides the {@code String} representation of the encoding. This
         * {@code String} is the same name that was passed to the constructor.
         * For the predefined encodings, the name is similar to the encoding's
         * variable (field) name. For example, {@code PCM_SIGNED.toString()}
         * returns the name "PCM_SIGNED".
         *
         * @return the encoding name
         */
        @Override
        public final String toString() {
            return name;
        }
    }
}