/*
 * Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <string.h>
#include <gst/gst.h>

#include "avcdecoder.h"

// Note: define as non-zero to enable warnings.
#define ENABLE_WARNINGS 1

/***************************************************************/

GST_DEBUG_CATEGORY_STATIC (avcdecoder_debug);
#define GST_CAT_DEFAULT avcdecoder_debug

/*
 * The input capabilities.
 */
static GstStaticPadTemplate sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
                         GST_PAD_SINK,
                         GST_PAD_ALWAYS,
                         GST_STATIC_CAPS ("video/x-h264")
                         );

/*
 * The output capabilities.
 */
// Note: For 'yuvs' the format should be "format = (fourcc) YUY2"
static GstStaticPadTemplate src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
                         GST_PAD_SRC,
                         GST_PAD_ALWAYS,
                         GST_STATIC_CAPS ("video/x-raw-ycbcr422, format = (fourcc) UYVY")
                         );

/***********************************************************************************
 * Substitution for
 * GST_BOILERPLATE (AvcDecoder, avcdecoder, GstElement, GST_TYPE_ELEMENT);
 ***********************************************************************************/
static void avcdecoder_base_init (gpointer g_class);
static void avcdecoder_class_init (AvcDecoderClass *g_class);
static void avcdecoder_init (AvcDecoder *object, AvcDecoderClass *g_class);
static void avcdecoder_state_destroy(AvcDecoder *decode);

static GstElementClass *parent_class = NULL;

static void avcdecoder_class_init_trampoline (gpointer g_class, gpointer data)
{
    parent_class = (GstElementClass *)  g_type_class_peek_parent (g_class);
    avcdecoder_class_init ((AvcDecoderClass *)g_class);
}

GType avcdecoder_get_type (void)
{
    static volatile gsize gonce_data = 0;
    // INLINE - g_once_init_enter()
    if (g_once_init_enter (&gonce_data))
    {
        GType _type;
        _type = gst_type_register_static_full (GST_TYPE_ELEMENT,
                                               g_intern_static_string ("AvcDecoder"),
                                               sizeof (AvcDecoderClass),
                                               avcdecoder_base_init,
                                               NULL,
                                               avcdecoder_class_init_trampoline,
                                               NULL,
                                               NULL,
                                               sizeof (AvcDecoder),
                                               0,
                                               (GInstanceInitFunc) avcdecoder_init,
                                               NULL,
                                               (GTypeFlags) 0);
        g_once_init_leave (&gonce_data, (gsize) _type);
    }
    return (GType) gonce_data;
}

/*
 * Forward declarations.
 */
static GstStateChangeReturn avcdecoder_change_state (GstElement* element, GstStateChange transition);
static gboolean avcdecoder_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn avcdecoder_chain (GstPad * pad, GstBuffer * buf);
static void avcdecoder_dispose(GObject* object);

/* --- GObject vmethod implementations --- */

static void
avcdecoder_base_init (gpointer gclass)
{
    GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);

    gst_element_class_set_details_simple(element_class,
                                         "AVCDecoder",
                                         "Codec/Decoder/Video",
                                         "Decode MPEG-4 AVC (H.264) video stream",
                                         "Oracle Corporation");

    gst_element_class_add_pad_template (element_class,
                                        gst_static_pad_template_get (&src_factory));
    gst_element_class_add_pad_template (element_class,
                                        gst_static_pad_template_get (&sink_factory));
}

/*
 * Initialize avcdecoder's class.
 */
static void
avcdecoder_class_init (AvcDecoderClass * klass)
{
    GstElementClass *gstelement_class = (GstElementClass *) klass;
    GObjectClass *gobject_class = (GObjectClass*)klass;

    gstelement_class->change_state = avcdecoder_change_state;

    gobject_class->dispose = avcdecoder_dispose;
}

/*
 * Initialize the new element.
 * Instantiate pads and add them to element.
 * Set pad callback functions.
 * Initialize instance structure.
 */
static void
avcdecoder_init (AvcDecoder * decode,
                 AvcDecoderClass * gclass)
{
    // Input.
    if (NULL == (decode->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink")))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to create sink pad!\n");
#endif
        return;
    }

    if (FALSE == gst_element_add_pad (GST_ELEMENT (decode), decode->sinkpad))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to add sink pad!\n");
#endif
    }

    gst_pad_set_chain_function (decode->sinkpad, GST_DEBUG_FUNCPTR(avcdecoder_chain));
    gst_pad_set_event_function(decode->sinkpad, avcdecoder_sink_event);

    // Output.
    if (NULL == (decode->srcpad = gst_pad_new_from_static_template (&src_factory, "src")))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to create source pad!\n");
#endif
        return;
    }

    if (TRUE != gst_element_add_pad (GST_ELEMENT (decode), decode->srcpad))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to add source pad!\n");
#endif
    }

    gst_pad_use_fixed_caps (decode->srcpad);

    decode->mutex = g_mutex_new();
}

static void
avcdecoder_dispose(GObject* object)
{
    AvcDecoder* decode = AVCDECODER(object);

    avcdecoder_state_destroy (decode);

    if (NULL != decode->mutex) {
        g_mutex_free(decode->mutex);
        decode->mutex = NULL;
    }

    G_OBJECT_CLASS(parent_class)->dispose(object);
}

/* --- GstElement vmethod implementations --- */

/*
 * GCompareDataFunc used to sort GstBuffers into order of ascending timestamp.
 */
static gint
avcdecoder_buffer_compare (gconstpointer a, gconstpointer b, gpointer user_data)
{
    gint ret = 0;

    if (NULL != a && NULL != b)
    {
        const GstBuffer* bufa = (const GstBuffer*)a;
        const GstBuffer* bufb = (const GstBuffer*)b;

        if (GST_BUFFER_TIMESTAMP_IS_VALID(bufa) && GST_BUFFER_TIMESTAMP_IS_VALID(bufb))
        {
            GstClockTime ta = GST_BUFFER_TIMESTAMP(bufa);
            GstClockTime tb = GST_BUFFER_TIMESTAMP(bufb);
            if (ta < tb)
            {
                ret = -1;
            }
            else if (ta > tb)
            {
                ret = 1;
            }
            // else ret = 0 by default.
        }
    }

    return ret;
}

/*
 * Callback which receives decoded video frames from the VDADecoder. The
 * decoded frames are not guaranteed to arrive in timestamp order, and the
 * number of frames between I-frames is unknown. Each frame is inserted in
 * timestamp order into the ordered_frames GQueue (protected by a mutex, as
 * this callback may be invoked from several threads), and frames are pushed
 * downstream from the head of the queue once they are known to be in order.
 */
static void
avcdecoder_decoder_output_callback (void* userData,
                                    CFDictionaryRef frameInfo,
                                    OSStatus status,
                                    uint32_t infoFlags,
                                    CVImageBufferRef imageBuffer)
{
    AvcDecoder *decode = AVCDECODER (userData);

    if(decode->is_flushing)
    {
        return;
    }

    // Check whether there is a problem.

    gboolean isGap = FALSE;

    if (kVDADecoderNoErr != status)
    {
#if ENABLE_WARNINGS
        g_warning("output callback received status %d\n", (int)status);
#endif
        isGap = TRUE;
    } else if (1UL << 1 == (infoFlags & (1UL << 1))) // XXX hard-coded flag; presumably kVDADecodeInfo_FrameDropped
    {
#if ENABLE_WARNINGS
        g_warning("output callback called on dropped frame\n");
#endif
        isGap = TRUE;
    } else if (NULL == imageBuffer)
    {
#if ENABLE_WARNINGS
        g_warning ("output callback received NULL image buffer!\n");
#endif
        isGap = TRUE;
    } else if ('2vuy' != CVPixelBufferGetPixelFormatType(imageBuffer))
    {
#if ENABLE_WARNINGS
        g_warning("output callback image buffer format not '2vuy'\n");
#endif
        isGap = TRUE;
    }

    // Retrieve the timestamp and delta flag.

    int64_t timestamp = 0;
    int32_t deltaFlag = 0; // deltaFlag == 0 indicates an intra-frame, non-zero an inter-frame.
    if (NULL != frameInfo)
    {
        CFNumberRef timestampRef = CFDictionaryGetValue(frameInfo, CFSTR("timestamp"));
        if (timestampRef)
        {
            CFNumberGetValue(timestampRef, kCFNumberSInt64Type, &timestamp);
        }
        CFNumberRef deltaFlagRef = CFDictionaryGetValue(frameInfo, CFSTR("deltaFlag"));
        if (deltaFlagRef)
        {
            CFNumberGetValue(deltaFlagRef, kCFNumberSInt32Type, &deltaFlag);
        }
    }

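    // Frames whose timestamps precede the current segment start (for example,
    // frames decoded from the preceding keyframe after a seek) are discarded.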
    if (timestamp < decode->segment_start)
    {
        return;
    }

    GstBuffer* buf = NULL;

    if (isGap)
    {
        // Push a flagged, empty buffer if there is a problem.

        buf = gst_buffer_new();
        GST_BUFFER_TIMESTAMP(buf) = timestamp;
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_GAP);
    }
    else
    {
        // Push a valid buffer.

        CVBufferRetain(imageBuffer); // return value equals parameter

        GstPad* srcpad = decode->srcpad;

        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t bytes_per_row = CVPixelBufferGetBytesPerRow(imageBuffer);
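        // Rows in a CVPixelBuffer may be padded beyond width*2 bytes for 2vuy
        // data, so the actual row stride is recorded once in the source caps
        // as "line_stride" for downstream elements.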
        if(!decode->is_stride_set)
        {
            GstStructure* caps_struct = gst_caps_get_structure(GST_PAD_CAPS(srcpad), 0);
            gst_structure_set(caps_struct, "line_stride", G_TYPE_INT, (int)bytes_per_row, NULL);
            decode->is_stride_set = TRUE;
        }
        if (kCVReturnSuccess == CVPixelBufferLockBaseAddress (imageBuffer, 0))
        {
            void* image_data = CVPixelBufferGetBaseAddress(imageBuffer);
            if (GST_FLOW_OK == gst_pad_alloc_buffer_and_set_caps (srcpad, 0, bytes_per_row*height,
                                                                  GST_PAD_CAPS(srcpad),
                                                                  &buf))
            {
                guint8* buffer_data = GST_BUFFER_DATA (buf);

                memcpy (buffer_data, image_data, GST_BUFFER_SIZE (buf));
                GST_BUFFER_TIMESTAMP(buf) = timestamp;
            }

            CVPixelBufferUnlockBaseAddress (imageBuffer, 0); // ignore return value
        }

        CVBufferRelease(imageBuffer);

        if (!buf)
        {
            buf = gst_buffer_new();
            GST_BUFFER_TIMESTAMP(buf) = timestamp;
            GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_GAP);
        }
    }

    // The callback may be invoked from several threads, so access to the
    // ordered_frames queue must be synchronized.
    g_mutex_lock(decode->mutex);

    g_queue_insert_sorted(decode->ordered_frames, buf, avcdecoder_buffer_compare, NULL);

    GstBuffer* frame;
    GstFlowReturn ret = GST_FLOW_OK;
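    // Drain the head of the sorted queue: a frame is released when it is the
    // first frame of the segment, when it lies within timestamp_ceil (about
    // 1.5 frame durations) of the previously pushed frame, or when the frame
    // just decoded is an I-frame with a later timestamp, in which case no
    // earlier frame should still be pending.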
    while(ret == GST_FLOW_OK && !decode->is_flushing && NULL != (frame = g_queue_peek_head(decode->ordered_frames)))
    {
        GstClockTime ts = GST_BUFFER_TIMESTAMP(frame);
        if(GST_CLOCK_TIME_NONE == decode->previous_timestamp ||         // first frame
           ts <= decode->previous_timestamp + decode->timestamp_ceil || // frame is at next timestamp
           (0 == deltaFlag && ts < timestamp))                          // have newer I-frame
        {
            decode->previous_timestamp = ts;
            g_queue_pop_head(decode->ordered_frames);

            if(GST_BUFFER_FLAG_IS_SET(frame, GST_BUFFER_FLAG_GAP))
            {
                // INLINE - gst_buffer_unref()
                gst_buffer_unref (frame);
            }
            else
            {
                if(decode->is_newsegment)
                {
                    GST_BUFFER_FLAG_SET(frame, GST_BUFFER_FLAG_DISCONT);
                    decode->is_newsegment = FALSE;
                }

                // Do not call gst_pad_push() while holding the mutex, to avoid deadlocks.
                g_mutex_unlock(decode->mutex);
                ret = gst_pad_push(decode->srcpad, frame);
                g_mutex_lock(decode->mutex);
            }
        }
        else
        {
            break;
        }
    }

    g_mutex_unlock(decode->mutex);
}

/*
 * GFunc used to unref GstBuffers in a queue.
 */
static void
avcdecoder_element_destroy(gpointer data, gpointer user_data)
{
    if (NULL != data)
    {
        GstBuffer* buf = (GstBuffer*)data;

        // INLINE - gst_buffer_unref()
        gst_buffer_unref (buf);
    }
}

/**
 * Initialize the AvcDecoder structure. This should happen
 * only once, before decoding begins.
 */
static void
avcdecoder_state_init(AvcDecoder *decode)
{
    decode->outputCallback = (VDADecoderOutputCallback*)avcdecoder_decoder_output_callback;
    decode->decoder = NULL;
    decode->is_initialized = FALSE;
    decode->is_newsegment = FALSE;
    decode->is_stride_set = FALSE;
    decode->frame_duration = GST_CLOCK_TIME_NONE;
    decode->ordered_frames = g_queue_new();
    decode->segment_start = 0;
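    // Fields not set here (e.g. is_flushing, previous_timestamp, timestamp_ceil)
    // rely on the zero-initialized instance and are set when a NEWSEGMENT event
    // or the first buffer arrives.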
}

/**
 * Reset the state of the AvcDecoder structure.
 */
static void
avcdecoder_state_reset(AvcDecoder *decode)
{
    // Flush the decoder.
    if (NULL != decode->decoder)
    {
        OSStatus result = VDADecoderFlush (decode->decoder, 0);
#if ENABLE_WARNINGS
        if (kVDADecoderNoErr != result)
        {
            g_warning ("Could not flush decoder: result code %d\n", (int)result);
        }
#endif
    }

    g_mutex_lock(decode->mutex);

    // Unref all sorted buffers and clear the associated queue.
    if (NULL != decode->ordered_frames)
    {
        g_queue_foreach(decode->ordered_frames, avcdecoder_element_destroy, NULL);
        g_queue_clear(decode->ordered_frames);
    }

    decode->is_newsegment = FALSE;
    decode->segment_start = 0;

    g_mutex_unlock(decode->mutex);
}

/**
 * Reset and then destroy the state of the AvcDecoder structure.
 */
static void
avcdecoder_state_destroy(AvcDecoder *decode)
{
    // Reset the state.
    avcdecoder_state_reset(decode);

    // Release the VDADecoder.
    if (NULL != decode->decoder)
    {
        OSStatus result = VDADecoderDestroy (decode->decoder);
#if ENABLE_WARNINGS
        if (kVDADecoderNoErr != result)
        {
            g_warning ("Could not destroy decoder: result code %d\n", (int)result);
        }
#endif
        decode->decoder = NULL;
    }

    // Free the sorted queue.
    if (NULL != decode->ordered_frames)
    {
        g_queue_free(decode->ordered_frames);
        decode->ordered_frames = NULL;
    }
}

/*
 * Perform processing needed for state transitions.
 */
static GstStateChangeReturn
avcdecoder_change_state (GstElement* element, GstStateChange transition)
{
    AvcDecoder *decode = AVCDECODER(element);

    switch(transition)
    {
        case GST_STATE_CHANGE_NULL_TO_READY:
            // Initialize the AvcDecoder structure.
            avcdecoder_state_init (decode);
            break;
        default:
            break;
    }

    // Change state.
    return parent_class->change_state(element, transition);
}

/*
 * Handles sink-pad events. FLUSH_START, FLUSH_STOP, and NEWSEGMENT receive
 * special handling before being forwarded; all other events are simply forwarded.
 */
static gboolean
avcdecoder_sink_event (GstPad * pad, GstEvent * event)
{
    gboolean ret;
    GstObject *parent = gst_object_get_parent((GstObject*)pad);
    AvcDecoder *decode = AVCDECODER (GST_OBJECT_PARENT (pad));
    GstEvent *newsegment = NULL;

    switch (GST_EVENT_TYPE (event))
    {
        case GST_EVENT_FLUSH_START:
        {
            // Start flushing buffers.

            // Set flag so chain function refuses buffers.
            decode->is_flushing = TRUE;

            break;
        }

        case GST_EVENT_FLUSH_STOP:
        {
            // Stop flushing buffers.
            avcdecoder_state_reset(decode);

            // Unset flag so chain function accepts buffers.
            decode->is_flushing = FALSE;

            break;
        }

        case GST_EVENT_NEWSEGMENT:
        {
            // Set a flag indicating a new segment has begun.
            decode->is_newsegment = TRUE;
            decode->previous_timestamp = GST_CLOCK_TIME_NONE;
            GstFormat segment_format;
            gint64 start;
            gst_event_parse_new_segment(event, NULL, NULL, &segment_format,
                                        &start, NULL, NULL);
            if(GST_FORMAT_TIME == segment_format)
            {
                decode->segment_start = start;
            }
            break;
        }

        default:
            break;
    }

    // Push the event downstream.
    ret = gst_pad_push_event (decode->srcpad, event);

    // Unref the parent object.
    gst_object_unref(parent);

    return ret;
}

/*
 * Processes a buffer of AVC-encoded video data pushed to the sink pad.
 */
static GstFlowReturn
avcdecoder_chain (GstPad * pad, GstBuffer * buf)
{
    GstFlowReturn ret = GST_FLOW_OK;
    AvcDecoder *decode = AVCDECODER (GST_OBJECT_PARENT (pad));
    OSStatus status = kVDADecoderNoErr;
//    g_print("chain - time %f discont %d flags %d\n",
//            (float)GST_BUFFER_TIMESTAMP(buf)/(float)GST_SECOND,
//            (int)GST_BUFFER_IS_DISCONT(buf), (int)GST_BUFFER_FLAGS(buf));

    // If between FLUSH_START and FLUSH_STOP, reject new buffers.
    if (decode->is_flushing)
    {
        // Unref the input buffer.
        // INLINE - gst_buffer_unref()
        gst_buffer_unref(buf);

        return GST_FLOW_WRONG_STATE;
    }

    // Initialize the element structure.
    if (FALSE == decode->is_initialized)
    {
        // Obtain configuration data from the "codec_data" structure in the sink caps.
        GstCaps* videoSpecificCaps = GST_BUFFER_CAPS (buf);
        if (NULL == videoSpecificCaps || gst_caps_get_size(videoSpecificCaps) < 1)
        {
            // INLINE - gst_buffer_unref()
            gst_buffer_unref(buf);
            return GST_FLOW_ERROR;
        }

        GstStructure* videoSpecificStructure = gst_caps_get_structure (videoSpecificCaps, 0);

        const GValue *videoSpecificValue = gst_structure_get_value(videoSpecificStructure, "codec_data");
        if (NULL == videoSpecificValue)
        {
            // INLINE - gst_buffer_unref()
            gst_buffer_unref(buf);
            return GST_FLOW_ERROR;
        }

        gint encoded_width;
        if (!gst_structure_get_int (videoSpecificStructure, "width", &encoded_width))
            encoded_width = 0;

        gint encoded_height;
        if (!gst_structure_get_int (videoSpecificStructure, "height", &encoded_height))
            encoded_height = 0;

        gint framerate_num;
        gint framerate_den;
        if (!gst_structure_get_fraction (videoSpecificStructure, "framerate", &framerate_num, &framerate_den))
        {
            framerate_num = 25;
            framerate_den = 1;
        }

        // Calculate frame duration and timestamp bound.
        decode->frame_duration = gst_util_uint64_scale_int_ceil(GST_SECOND, framerate_den, framerate_num);
        decode->timestamp_ceil = (GstClockTime)(1.5*decode->frame_duration + 0.5);
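        // For example, at 25 fps frame_duration is GST_SECOND/25 = 40 ms, so
        // timestamp_ceil is 60 ms: a decoded frame is pushed downstream once
        // its timestamp is within 60 ms of the previously pushed frame.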

        GstBuffer*  videoSpecificBuffer = gst_value_get_buffer (videoSpecificValue);
        guint8* videoSpecificData = GST_BUFFER_DATA (videoSpecificBuffer);
        guint videoSpecificDataLength = GST_BUFFER_SIZE (videoSpecificBuffer);
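        // codec_data is expected to hold the avcC configuration record
        // (AVCDecoderConfigurationRecord containing the SPS and PPS), which is
        // handed to the VDADecoder below as kVDADecoderConfiguration_avcCData.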

        SInt32 avcWidth = (SInt32)encoded_width;
        SInt32 avcHeight = (SInt32)encoded_height;

        // Set up parameters required to create the VDADecoder.
        CFNumberRef width = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &avcWidth);
        CFNumberRef height = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &avcHeight);
        SInt32 sourceFormat = 'avc1';
        CFNumberRef avcFormat = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &sourceFormat);
        CFDataRef avcCData = CFDataCreate(kCFAllocatorDefault, videoSpecificData, videoSpecificDataLength);

        CFMutableDictionaryRef decoderConfiguration = (CFDictionaryCreateMutable(kCFAllocatorDefault, 4, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));

        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_Height, height);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_Width, width);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_SourceFormat, avcFormat);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_avcCData, avcCData);

        // Note: For 'yuvs' the formatType should be kYUVSPixelFormat.
        SInt32 formatType = k2vuyPixelFormat;
        CFNumberRef imgFormat = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &formatType);
        CFMutableDictionaryRef destinationImageBufferAttributes = CFDictionaryCreateMutable(kCFAllocatorDefault, 2, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

        // empty IOSurface properties dictionary
        CFDictionaryRef emptyDictionary = CFDictionaryCreate(kCFAllocatorDefault,
                                                             NULL,
                                                             NULL,
                                                             0,
                                                             &kCFTypeDictionaryKeyCallBacks,
                                                             &kCFTypeDictionaryValueCallBacks);

        CFDictionarySetValue(destinationImageBufferAttributes,
                             kCVPixelBufferPixelFormatTypeKey, imgFormat);
        CFDictionarySetValue(destinationImageBufferAttributes,
                             kCVPixelBufferIOSurfacePropertiesKey,
                             emptyDictionary); // XXX probably should delete this.

        // Create the VDADecoder.
        status = VDADecoderCreate(decoderConfiguration,
                                  destinationImageBufferAttributes,
                                  (VDADecoderOutputCallback *)decode->outputCallback,
                                  (void *)decode,
                                  &decode->decoder);

        if (decoderConfiguration)
            CFRelease(decoderConfiguration);
        if (destinationImageBufferAttributes)
            CFRelease(destinationImageBufferAttributes);
        if (emptyDictionary)
            CFRelease(emptyDictionary);
        if (avcCData)
            CFRelease(avcCData);

        if (kVDADecoderNoErr == status)
        {
            // Set the srcpad caps.

            // Note: For 'yuvs' the format should be GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
            GstCaps* caps = gst_caps_new_simple (
                                                 "video/x-raw-ycbcr422",
                                                 "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
                                                 "framerate", GST_TYPE_FRACTION, framerate_num, framerate_den,
                                                 "width", G_TYPE_INT, encoded_width,
                                                 "height", G_TYPE_INT, encoded_height,
                                                 NULL);
            gst_pad_set_caps (decode->srcpad, caps);
            gst_caps_unref (caps);

            decode->is_initialized = TRUE;
        }
        else
        {
            // The message is also used for the bus error below, so select it
            // regardless of ENABLE_WARNINGS.
            const char* message;
            switch (status)
            {
                case kVDADecoderHardwareNotSupportedErr:
                    message = "hardware does not support accelerated video decode services";
                    break;
                case kVDADecoderFormatNotSupportedErr:
                    message = "hardware decoder does not support requested output format";
                    break;
                case kVDADecoderConfigurationError:
                    message = "unsupported hardware decoder configuration parameters";
                    break;
                case kVDADecoderDecoderFailedErr:
                    message = "hardware decoder resources in use by another process or cannot decode the source into the requested format";
                    break;
                default:
                    message = "unknown error";
                    break;
            }
#if ENABLE_WARNINGS
            g_warning ("Could not create decoder: result code %d, %s", (int)status, message);
#endif

            // Post an error message to the pipeline bus.
            GError* error = g_error_new (g_quark_from_string("AVCDecoder"), 666, "%s", message);
            GstMessage* msg = gst_message_new_error (GST_OBJECT (decode), error, message);
            gst_element_post_message(GST_ELEMENT(decode), msg);

            ret = GST_FLOW_ERROR;
        }
    }

    if (GST_FLOW_OK == ret)
    {
        // Set the timestamp of the encoded frame.
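        // The frame_info dictionary built here is retained by VDADecoderDecode
        // and handed back unchanged to the output callback; this is how the
        // timestamp and delta flag survive the asynchronous decode.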
        int64_t timestamp = GST_BUFFER_TIMESTAMP (buf);
        CFStringRef timestamp_key = CFSTR("timestamp");
        CFNumberRef timestamp_value = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &timestamp);
        int32_t deltaFlag = (int32_t)(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT) ?
                                      GST_BUFFER_FLAG_DELTA_UNIT : 0);
        CFStringRef delta_key = CFSTR("deltaFlag");
        CFNumberRef delta_value = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &deltaFlag);
        CFStringRef keys[2];
        CFNumberRef values[2];
        keys[0] = timestamp_key;
        keys[1] = delta_key;
        values[0] = timestamp_value;
        values[1] = delta_value;
        CFDictionaryRef frame_info = CFDictionaryCreate(kCFAllocatorDefault,
                                                        (const void **)&keys,
                                                        (const void **)&values,
                                                        2,
                                                        &kCFTypeDictionaryKeyCallBacks,
                                                        &kCFTypeDictionaryValueCallBacks);
        CFTypeRef buffer = CFDataCreate(kCFAllocatorDefault, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

        // Send the encoded frame to the VDADecoder.
        status = VDADecoderDecode (decode->decoder, 0, buffer, frame_info);
        CFRelease(buffer);
        CFRelease(frame_info);

        if (kVDADecoderNoErr != status)
        {
#if ENABLE_WARNINGS
            g_warning ("Could not decode data: result code %d\n", (int)status);
#endif

            // Set an error return code only if this was not a "simple" decoding error.
            if (kVDADecoderDecoderFailedErr != status)
            {
                ret = GST_FLOW_ERROR;
            }
        }
    }

    // INLINE - gst_buffer_unref()
    gst_buffer_unref (buf);

    return ret;
}

// --------------------------------------------------------------------------
gboolean avcdecoder_plugin_init (GstPlugin * avcdecoder)
{
    /* debug category for filtering log messages
     *
     * exchange the string 'Template avcdecoder' with your description
     */
    GST_DEBUG_CATEGORY_INIT (avcdecoder_debug, "avcdecoder",
                             0, "Template avcdecoder"); // FIXME

    return gst_element_register (avcdecoder, "avcdecoder", 512, TYPE_AVCDECODER);
}
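
// Usage note (a sketch, not exercised by this file): the element expects
// video/x-h264 caps carrying codec_data, width, height and framerate (e.g. as
// produced by an MP4/QuickTime demuxer) and outputs video/x-raw-ycbcr422 UYVY
// buffers whose caps include a non-standard "line_stride" field, so downstream
// elements must understand that field.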