/*
 * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <string.h>
#include <gst/gst.h>

#include "avcdecoder.h"

// Note: define as non-zero to enable warnings.
#define ENABLE_WARNINGS 1

/***************************************************************/

GST_DEBUG_CATEGORY_STATIC (avcdecoder_debug);
#define GST_CAT_DEFAULT avcdecoder_debug

/*
 * The input capabilities.
 */
static GstStaticPadTemplate sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
                         GST_PAD_SINK,
                         GST_PAD_ALWAYS,
                         GST_STATIC_CAPS ("video/x-h264")
                         );

/*
 * The output capabilities.
 */
// Note: For 'yuvs' the format should be "format = (fourcc) YUY2"
static GstStaticPadTemplate src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
                         GST_PAD_SRC,
                         GST_PAD_ALWAYS,
                         GST_STATIC_CAPS ("video/x-raw-ycbcr422, format = (fourcc) UYVY")
                         );

/***********************************************************************************
 * Substitution for
 * GST_BOILERPLATE (AvcDecoder, avcdecoder, GstElement, GST_TYPE_ELEMENT);
 ***********************************************************************************/
static void avcdecoder_base_init (gpointer g_class);
static void avcdecoder_class_init (AvcDecoderClass *g_class);
static void avcdecoder_init (AvcDecoder *object, AvcDecoderClass *g_class);
static void avcdecoder_state_destroy(AvcDecoder *decode);

static GstElementClass *parent_class = NULL;

static void avcdecoder_class_init_trampoline (gpointer g_class, gpointer data)
{
    parent_class = (GstElementClass *)  g_type_class_peek_parent (g_class);
    avcdecoder_class_init ((AvcDecoderClass *)g_class);
}

GType avcdecoder_get_type (void)
{
    static volatile gsize gonce_data = 0;
    // INLINE - g_once_init_enter()
    if (g_once_init_enter (&gonce_data))
    {
        GType _type;
        _type = gst_type_register_static_full (GST_TYPE_ELEMENT,
                                               g_intern_static_string ("AvcDecoder"),
                                               sizeof (AvcDecoderClass),
                                               avcdecoder_base_init,
                                               NULL,
                                               avcdecoder_class_init_trampoline,
                                               NULL,
                                               NULL,
                                               sizeof (AvcDecoder),
                                               0,
                                               (GInstanceInitFunc) avcdecoder_init,
                                               NULL,
                                               (GTypeFlags) 0);
        g_once_init_leave (&gonce_data, (gsize) _type);
    }
    return (GType) gonce_data;
}
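
/*
 * A minimal usage sketch (not part of this file): once the plugin containing
 * this element has been registered, the decoder can be created and linked
 * like any other GStreamer 0.10 element. The element name "avcdecoder"
 * matches the name passed to gst_element_register() in
 * avcdecoder_plugin_init() below; 'pipeline', 'h264parser', and 'videosink'
 * are assumed to exist already.
 *
 *     GstElement* dec = gst_element_factory_make ("avcdecoder", "avcdec");
 *     if (NULL != dec)
 *     {
 *         gst_bin_add (GST_BIN (pipeline), dec);
 *         gst_element_link_many (h264parser, dec, videosink, NULL);
 *     }
 */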

/*
 * Forward declarations.
 */
static GstStateChangeReturn avcdecoder_change_state (GstElement* element, GstStateChange transition);
static gboolean avcdecoder_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn avcdecoder_chain (GstPad * pad, GstBuffer * buf);
static void avcdecoder_dispose(GObject* object);

/* --- GObject vmethod implementations --- */

static void
avcdecoder_base_init (gpointer gclass)
{
    GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);

    gst_element_class_set_details_simple(element_class,
                                         "AVCDecoder",
                                         "Codec/Decoder/Video",
                                         "Decode raw MPEG-4 H.264 video stream",
                                         "Oracle Corporation");

    gst_element_class_add_pad_template (element_class,
                                        gst_static_pad_template_get (&src_factory));
    gst_element_class_add_pad_template (element_class,
                                        gst_static_pad_template_get (&sink_factory));
}

/*
 * Initialize avcdecoder's class.
 */
static void
avcdecoder_class_init (AvcDecoderClass * klass)
{
    GstElementClass *gstelement_class = (GstElementClass *) klass;
    GObjectClass *gobject_class = (GObjectClass*)klass;

    gstelement_class->change_state = avcdecoder_change_state;

    gobject_class->dispose = avcdecoder_dispose;
}

/*
 * Initialize the new element.
 * Instantiate pads and add them to element.
 * Set pad callback functions.
 * Initialize instance structure.
 */
static void
avcdecoder_init (AvcDecoder * decode,
                 AvcDecoderClass * gclass)
{
    // Input.
    if (NULL == (decode->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink")))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to create sink pad!\n");
#endif
        return;
    }

    if (FALSE == gst_element_add_pad (GST_ELEMENT (decode), decode->sinkpad))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to add sink pad!\n");
#endif
    }

    gst_pad_set_chain_function (decode->sinkpad, GST_DEBUG_FUNCPTR(avcdecoder_chain));
    gst_pad_set_event_function(decode->sinkpad, avcdecoder_sink_event);

    // Output.
    if (NULL == (decode->srcpad = gst_pad_new_from_static_template (&src_factory, "src")))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to create source pad!\n");
#endif
        return;
    }

    if (TRUE != gst_element_add_pad (GST_ELEMENT (decode), decode->srcpad))
    {
#if ENABLE_WARNINGS
        g_warning ("avcdecoder element failed to add source pad!\n");
#endif
    }

    gst_pad_use_fixed_caps (decode->srcpad);
}

static void
avcdecoder_dispose(GObject* object)
{
    AvcDecoder* decode = AVCDECODER(object);

    avcdecoder_state_destroy (decode);

    G_OBJECT_CLASS(parent_class)->dispose(object);
}

/* --- GstElement vmethod implementations --- */

/*
 * GCompareDataFunc used to sort GstBuffers into order of ascending timestamp.
 */
static gint
avcdecoder_buffer_compare (gconstpointer a, gconstpointer b, gpointer user_data)
{
    gint ret = 0;

    if (NULL != a && NULL != b)
    {
        const GstBuffer* bufa = (const GstBuffer*)a;
        const GstBuffer* bufb = (const GstBuffer*)b;

        if (GST_BUFFER_TIMESTAMP_IS_VALID(bufa) && GST_BUFFER_TIMESTAMP_IS_VALID(bufb))
        {
            GstClockTime ta = GST_BUFFER_TIMESTAMP(bufa);
            GstClockTime tb = GST_BUFFER_TIMESTAMP(bufb);
            if (ta < tb)
            {
                ret = -1;
            }
            else if (ta > tb)
            {
                ret = 1;
            }
            // else ret = 0 by default.
        }
    }

    return ret;
}
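
/*
 * A minimal sketch of how the comparator above is meant to be used (the real
 * call sites are in the output callback below): inserting with
 * g_queue_insert_sorted() keeps the queue ordered by ascending timestamp, so
 * the oldest frame is always at the head. 'buf' here is an assumed GstBuffer*.
 *
 *     GQueue* q = g_queue_new();
 *     g_queue_insert_sorted (q, buf, avcdecoder_buffer_compare, NULL);
 *     GstBuffer* oldest = (GstBuffer*) g_queue_peek_head (q);
 */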

/*
 * Callback which receives decoded video frames from the VDADecoder. The
 * decoded frames are not guaranteed to arrive in timestamp order, and the
 * number of frames between I-frames is unknown. Each frame is therefore
 * inserted into a timestamp-sorted queue (decode->ordered_frames), and frames
 * are pushed downstream from the head of that queue only once the timestamp
 * heuristic below indicates that no earlier-stamped frame is still pending.
 */
static void
avcdecoder_decoder_output_callback (void* userData,
                                    CFDictionaryRef frameInfo,
                                    OSStatus status,
                                    uint32_t infoFlags,
                                    CVImageBufferRef imageBuffer)
{
    AvcDecoder *decode = AVCDECODER (userData);

    if(decode->is_flushing)
    {
        return;
    }

    // Check whether there is a problem.

    gboolean isGap = FALSE;

    if (kVDADecoderNoErr != status)
    {
#if ENABLE_WARNINGS
        g_warning("output callback received status %d\n", (int)status);
#endif
        isGap = TRUE;
    } else if (1UL << 1 == (infoFlags & (1UL << 1))) // XXX hard-coding
    {
#if ENABLE_WARNINGS
        g_warning("output callback called on dropped frame\n");
#endif
        isGap = TRUE;
    } else if (NULL == imageBuffer)
    {
#if ENABLE_WARNINGS
        g_warning ("output callback received NULL image buffer!\n");
#endif
        isGap = TRUE;
    } else if ('2vuy' != CVPixelBufferGetPixelFormatType(imageBuffer))
    {
#if ENABLE_WARNINGS
        g_warning("output callback image buffer format not '2vuy'\n");
#endif
        isGap = TRUE;
    }

    // Retrieve the timestamp and delta flag.

    int64_t timestamp = 0;
    int32_t deltaFlag = 0; // deltaFlag == 0 indicates an intra-frame, non-zero an inter-frame.
    if (NULL != frameInfo)
    {
        CFNumberRef timestampRef = CFDictionaryGetValue(frameInfo, CFSTR("timestamp"));
        if (timestampRef)
        {
            CFNumberGetValue(timestampRef, kCFNumberSInt64Type, &timestamp);
        }
        CFNumberRef deltaFlagRef = CFDictionaryGetValue(frameInfo, CFSTR("deltaFlag"));
        if (deltaFlagRef)
        {
            CFNumberGetValue(deltaFlagRef, kCFNumberSInt32Type, &deltaFlag);
        }
    }

    if (timestamp < decode->segment_start)
    {
        return;
    }

    if (isGap)
    {
        // Push a flagged, empty buffer if there is a problem.

        GstBuffer* buf = gst_buffer_new();
        GST_BUFFER_TIMESTAMP(buf) = timestamp;
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_GAP);
        g_queue_insert_sorted(decode->ordered_frames, buf, avcdecoder_buffer_compare, NULL);
    }
    else
    {
        // Push a valid buffer.

        CVBufferRetain(imageBuffer); // return value equals parameter

        GstPad* srcpad = decode->srcpad;

        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t bytes_per_row = CVPixelBufferGetBytesPerRow(imageBuffer);
        if(!decode->is_stride_set)
        {
            GstStructure* caps_struct = gst_caps_get_structure(GST_PAD_CAPS(srcpad), 0);
            gst_structure_set(caps_struct, "line_stride", G_TYPE_INT, (int)bytes_per_row, NULL);
            decode->is_stride_set = TRUE;
        }
        gboolean is_buffer_enqueued = FALSE;
        if (kCVReturnSuccess == CVPixelBufferLockBaseAddress (imageBuffer, 0))
        {
            void* image_data = CVPixelBufferGetBaseAddress(imageBuffer);
            GstBuffer* buf;
            if (GST_FLOW_OK == gst_pad_alloc_buffer_and_set_caps (srcpad, 0, bytes_per_row*height,
                                                                  GST_PAD_CAPS(srcpad),
                                                                  &buf))
            {
                guint8* buffer_data = GST_BUFFER_DATA (buf);

                memcpy (buffer_data, image_data, GST_BUFFER_SIZE (buf));
                GST_BUFFER_TIMESTAMP(buf) = timestamp;

                g_queue_insert_sorted(decode->ordered_frames, buf, avcdecoder_buffer_compare, NULL);
                is_buffer_enqueued = TRUE;
            }

            CVPixelBufferUnlockBaseAddress (imageBuffer, 0); // ignore return value
        }

        CVBufferRelease(imageBuffer);

        if (!is_buffer_enqueued)
        {
            GstBuffer* buf = gst_buffer_new();
            GST_BUFFER_TIMESTAMP(buf) = timestamp;
            GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_GAP);
            g_queue_insert_sorted(decode->ordered_frames, buf, avcdecoder_buffer_compare, NULL);
        }
    }

    GstBuffer* frame;
    GstFlowReturn ret = GST_FLOW_OK;
    while(ret == GST_FLOW_OK && !decode->is_flushing && NULL != (frame = g_queue_peek_head(decode->ordered_frames)))
    {
        GstClockTime ts = GST_BUFFER_TIMESTAMP(frame);
        if(GST_CLOCK_TIME_NONE == decode->previous_timestamp ||         // first frame
           ts <= decode->previous_timestamp + decode->timestamp_ceil || // frame is at next timestamp
           (0 == deltaFlag && ts < timestamp))                          // have newer I-frame
        {
            if(GST_BUFFER_FLAG_IS_SET(frame, GST_BUFFER_FLAG_GAP))
            {
                // INLINE - gst_buffer_unref()
                gst_buffer_unref (frame);
            }
            else
            {
                if(decode->is_newsegment)
                {
                    GST_BUFFER_FLAG_SET(frame, GST_BUFFER_FLAG_DISCONT);
                    decode->is_newsegment = FALSE;
                }
                ret = gst_pad_push(decode->srcpad, frame);
            }
            decode->previous_timestamp = ts;
            g_queue_pop_head(decode->ordered_frames);
        }
        else
        {
            break;
        }
    }
}

/*
 * GFunc used to unref GstBuffers in a queue.
 */
static void
avcdecoder_element_destroy(gpointer data, gpointer user_data)
{
    if (NULL != data)
    {
        GstBuffer* buf = (GstBuffer*)data;

        // INLINE - gst_buffer_unref()
        gst_buffer_unref (buf);
    }
}

/**
 * Initialize the AvcDecoder structure. This should happen
 * only once, before decoding begins.
 */
static void
avcdecoder_state_init(AvcDecoder *decode)
{
    decode->outputCallback = (VDADecoderOutputCallback*)avcdecoder_decoder_output_callback;
    decode->decoder = NULL;
    decode->is_initialized = FALSE;
    decode->is_newsegment = FALSE;
    decode->is_stride_set = FALSE;
    decode->frame_duration = GST_CLOCK_TIME_NONE;
    decode->ordered_frames = g_queue_new();
    decode->segment_start = 0;
}

/**
 * Reset the state of the AvcDecoder structure.
 */
static void
avcdecoder_state_reset(AvcDecoder *decode)
{
    // Flush the decoder.
    if (NULL != decode->decoder)
    {
        OSStatus result = VDADecoderFlush (decode->decoder, 0);
#if ENABLE_WARNINGS
        if (kVDADecoderNoErr != result)
        {
            g_warning ("Could not flush decoder: result code %d\n", (int)result);
        }
#endif
    }

    // Unref all sorted buffers and clear the associated queue.
    if (NULL != decode->ordered_frames)
    {
        g_queue_foreach(decode->ordered_frames, avcdecoder_element_destroy, NULL);
        g_queue_clear(decode->ordered_frames);
    }

    decode->is_newsegment = FALSE;
    decode->segment_start = 0;
}

/**
 * Reset and then destroy the state of the AvcDecoder structure.
 */
static void
avcdecoder_state_destroy(AvcDecoder *decode)
{
    // Reset the state.
    avcdecoder_state_reset(decode);

    // Release the VDADecoder.
    if (NULL != decode->decoder)
    {
        OSStatus result = VDADecoderDestroy (decode->decoder);
#if ENABLE_WARNINGS
        if (kVDADecoderNoErr != result)
        {
            g_warning ("Could not destroy decoder: result code %d\n", (int)result);
        }
#endif
        decode->decoder = NULL;
    }

    // Free the sorted queue.
    if (NULL != decode->ordered_frames)
    {
        g_queue_free(decode->ordered_frames);
        decode->ordered_frames = NULL;
    }
}
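
/*
 * Lifecycle summary: avcdecoder_state_init() is invoked on the NULL_TO_READY
 * transition (see avcdecoder_change_state() below), avcdecoder_state_reset()
 * on FLUSH_STOP (see avcdecoder_sink_event()), and avcdecoder_state_destroy()
 * from avcdecoder_dispose() above.
 */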

/*
 * Perform processing needed for state transitions.
 */
static GstStateChangeReturn
avcdecoder_change_state (GstElement* element, GstStateChange transition)
{
    AvcDecoder *decode = AVCDECODER(element);

    switch(transition)
    {
        case GST_STATE_CHANGE_NULL_TO_READY:
            // Initialize the AvcDecoder structure.
            avcdecoder_state_init (decode);
            break;
        default:
            break;
    }

    // Change state.
    return parent_class->change_state(element, transition);
}

/*
 * FLUSH_START, FLUSH_STOP, and NEWSEGMENT events receive special handling
 * before being pushed downstream; all other events are simply forwarded.
 */
static gboolean
avcdecoder_sink_event (GstPad * pad, GstEvent * event)
{
    gboolean ret;
    GstObject *parent = gst_object_get_parent((GstObject*)pad);
    AvcDecoder *decode = AVCDECODER (GST_OBJECT_PARENT (pad));
    GstEvent *newsegment = NULL;

    switch (GST_EVENT_TYPE (event))
    {
        case GST_EVENT_FLUSH_START:
        {
            // Start flushing buffers.

            // Set flag so chain function refuses buffers.
            decode->is_flushing = TRUE;

            break;
        }

        case GST_EVENT_FLUSH_STOP:
        {
            // Stop flushing buffers.
            avcdecoder_state_reset(decode);

            // Unset flag so chain function accepts buffers.
            decode->is_flushing = FALSE;

            break;
        }

        case GST_EVENT_NEWSEGMENT:
        {
            // Set a flag indicating a new segment has begun.
            decode->is_newsegment = TRUE;
            decode->previous_timestamp = GST_CLOCK_TIME_NONE;
            GstFormat segment_format;
            gint64 start;
            gst_event_parse_new_segment(event, NULL, NULL, &segment_format,
                                        &start, NULL, NULL);
            if(GST_FORMAT_TIME == segment_format)
            {
                decode->segment_start = start;
            }
            break;
        }

        default:
            break;
    }

    // Push the event downstream.
    ret = gst_pad_push_event (decode->srcpad, event);

    // Unref the parent object.
    gst_object_unref(parent);

    return ret;
}

/*
 * Processes a buffer of AVC-encoded video data pushed to the sink pad.
 */
static GstFlowReturn
avcdecoder_chain (GstPad * pad, GstBuffer * buf)
{
    GstFlowReturn ret = GST_FLOW_OK;
    AvcDecoder *decode = AVCDECODER (GST_OBJECT_PARENT (pad));
    OSStatus status = kVDADecoderNoErr;
//    g_print("chain - time %f discont %d flags %d\n",
//            (float)GST_BUFFER_TIMESTAMP(buf)/(float)GST_SECOND,
//            (int)GST_BUFFER_IS_DISCONT(buf), (int)GST_BUFFER_FLAGS(buf));

    // If between FLUSH_START and FLUSH_STOP, reject new buffers.
    if (decode->is_flushing)
    {
        // Unref the input buffer.
        // INLINE - gst_buffer_unref()
        gst_buffer_unref(buf);

        return GST_FLOW_WRONG_STATE;
    }

    // Initialize the element structure.
    if (FALSE == decode->is_initialized)
    {
        // Obtain configuration data from the "codec_data" structure in the sink caps.
        GstCaps* videoSpecificCaps = GST_BUFFER_CAPS (buf);
        if (NULL == videoSpecificCaps || gst_caps_get_size(videoSpecificCaps) < 1)
        {
            // INLINE - gst_buffer_unref()
            gst_buffer_unref(buf);
            return GST_FLOW_ERROR;
        }

        GstStructure* videoSpecificStructure = gst_caps_get_structure (videoSpecificCaps, 0);

        const GValue *videoSpecificValue = gst_structure_get_value(videoSpecificStructure, "codec_data");
        if (NULL == videoSpecificValue)
        {
            // INLINE - gst_buffer_unref()
            gst_buffer_unref(buf);
            return GST_FLOW_ERROR;
        }

        gint encoded_width;
        if (!gst_structure_get_int (videoSpecificStructure, "width", &encoded_width))
            encoded_width = 0;

        gint encoded_height;
        if (!gst_structure_get_int (videoSpecificStructure, "height", &encoded_height))
            encoded_height = 0;

        gint framerate_num;
        gint framerate_den;
        if (!gst_structure_get_fraction (videoSpecificStructure, "framerate", &framerate_num, &framerate_den))
        {
            framerate_num = 25;
            framerate_den = 1;
        }

        // Calculate frame duration and timestamp bound.
        decode->frame_duration = gst_util_uint64_scale_int_ceil(GST_SECOND, framerate_den, framerate_num);
        decode->timestamp_ceil = (GstClockTime)(1.5*decode->frame_duration + 0.5);
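        // Worked example with the default 25/1 frame rate above:
        // frame_duration = ceil(1 GST_SECOND / 25) = 40 ms, so
        // timestamp_ceil = 1.5 * 40 ms = 60 ms (the + 0.5 merely rounds the
        // double to the nearest nanosecond); a queued frame is pushed only if
        // it lies within 1.5 frame durations of the previously pushed one.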

        GstBuffer*  videoSpecificBuffer = gst_value_get_buffer (videoSpecificValue);
        guint8* videoSpecificData = GST_BUFFER_DATA (videoSpecificBuffer);
        guint videoSpecificDataLength = GST_BUFFER_SIZE (videoSpecificBuffer);

        SInt32 avcWidth = (SInt32)encoded_width;
        SInt32 avcHeight = (SInt32)encoded_height;

        // Set up parameters required to create the VDADecoder.
        CFNumberRef width = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &avcWidth);
        CFNumberRef height = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &avcHeight);
        SInt32 sourceFormat = 'avc1';
        CFNumberRef avcFormat = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &sourceFormat);
        CFDataRef avcCData = CFDataCreate(kCFAllocatorDefault, videoSpecificData, videoSpecificDataLength);

        CFMutableDictionaryRef decoderConfiguration = (CFDictionaryCreateMutable(kCFAllocatorDefault, 4, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));

        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_Height, height);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_Width, width);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_SourceFormat, avcFormat);
        CFDictionarySetValue(decoderConfiguration, kVDADecoderConfiguration_avcCData, avcCData);

        // Note: For 'yuvs' the formatType should be kYUVSPixelFormat.
        SInt32 formatType = k2vuyPixelFormat;
        CFNumberRef imgFormat = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &formatType);
        CFMutableDictionaryRef destinationImageBufferAttributes = CFDictionaryCreateMutable(kCFAllocatorDefault, 2, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

        // empty IOSurface properties dictionary
        CFDictionaryRef emptyDictionary = CFDictionaryCreate(kCFAllocatorDefault,
                                                             NULL,
                                                             NULL,
                                                             0,
                                                             &kCFTypeDictionaryKeyCallBacks,
                                                             &kCFTypeDictionaryValueCallBacks);

        CFDictionarySetValue(destinationImageBufferAttributes,
                             kCVPixelBufferPixelFormatTypeKey, imgFormat);
        CFDictionarySetValue(destinationImageBufferAttributes,
                             kCVPixelBufferIOSurfacePropertiesKey,
                             emptyDictionary); // XXX probably should delete this.

        // Create the VDADecoder.
        status = VDADecoderCreate(decoderConfiguration,
                                  destinationImageBufferAttributes,
                                  (VDADecoderOutputCallback *)decode->outputCallback,
                                  (void *)decode,
                                  &decode->decoder);

        if (decoderConfiguration)
            CFRelease(decoderConfiguration);
        if (destinationImageBufferAttributes)
            CFRelease(destinationImageBufferAttributes);
        if (emptyDictionary)
            CFRelease(emptyDictionary);
        if (avcCData)
            CFRelease(avcCData);
        // Release the CFNumbers created above; the dictionaries hold their own references.
        if (width) CFRelease(width);
        if (height) CFRelease(height);
        if (avcFormat) CFRelease(avcFormat);
        if (imgFormat) CFRelease(imgFormat);

        if (kVDADecoderNoErr == status)
        {
            // Set the srcpad caps.

            // Note: For 'yuvs' the format should be GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
            GstCaps* caps = gst_caps_new_simple (
                                                 "video/x-raw-ycbcr422",
                                                 "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
                                                 "framerate", GST_TYPE_FRACTION, framerate_num, framerate_den,
                                                 "width", G_TYPE_INT, encoded_width,
                                                 "height", G_TYPE_INT, encoded_height,
                                                 NULL);
            gst_pad_set_caps (decode->srcpad, caps);
            gst_caps_unref (caps);

            decode->is_initialized = TRUE;
        }
        else
        {
            // Note: 'message' is also used below for the bus error message,
            // so it is determined regardless of ENABLE_WARNINGS.
            const char* message;
            switch (status)
            {
                case kVDADecoderHardwareNotSupportedErr:
                    message = "hardware does not support accelerated video decode services";
                    break;
                case kVDADecoderFormatNotSupportedErr:
                    message = "hardware decoder does not support requested output format";
                    break;
                case kVDADecoderConfigurationError:
                    message = "unsupported hardware decoder configuration parameters";
                    break;
                case kVDADecoderDecoderFailedErr:
                    message = "hardware decoder resources in use by another process or cannot decode the source into the requested format";
                    break;
                default:
                    message = "unknown error";
                    break;
            }
#if ENABLE_WARNINGS
            g_warning ("Could not create decoder: result code %d, %s", (int)status, message);
#endif

            // Post an error message to the pipeline bus.
            GError* error = g_error_new (g_quark_from_string("AVCDecoder"), 666, "%s", message);
            GstMessage* msg = gst_message_new_error (GST_OBJECT (decode), error, message);
            gst_element_post_message(GST_ELEMENT(decode), msg);

            ret = GST_FLOW_ERROR;
        }
    }

    if (GST_FLOW_OK == ret)
    {
        // Set the timestamp of the encoded frame.
        int64_t timestamp = GST_BUFFER_TIMESTAMP (buf);
        CFStringRef timestamp_key = CFSTR("timestamp");
        CFNumberRef timestamp_value = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &timestamp);
        int32_t deltaFlag = (int32_t)(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT) ?
                                      GST_BUFFER_FLAG_DELTA_UNIT : 0);
        CFStringRef delta_key = CFSTR("deltaFlag");
        CFNumberRef delta_value = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &deltaFlag);
        CFStringRef keys[2];
        CFNumberRef values[2];
        keys[0] = timestamp_key;
        keys[1] = delta_key;
        values[0] = timestamp_value;
        values[1] = delta_value;
        CFDictionaryRef frame_info = CFDictionaryCreate(kCFAllocatorDefault,
                                                        (const void **)&keys,
                                                        (const void **)&values,
                                                        2,
                                                        &kCFTypeDictionaryKeyCallBacks,
                                                        &kCFTypeDictionaryValueCallBacks);
        CFTypeRef buffer = CFDataCreate(kCFAllocatorDefault, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

        // Send the encoded frame to the VDADecoder.
        status = VDADecoderDecode (decode->decoder, 0, buffer, frame_info);
        CFRelease(buffer);
        CFRelease(frame_info);
        // frame_info retained the CFNumbers, so drop the local references as well.
        CFRelease(timestamp_value);
        CFRelease(delta_value);

        if (kVDADecoderNoErr != status)
        {
#if ENABLE_WARNINGS
            g_warning ("Could not decode data: result code %d\n", (int)status);
#endif

            // Set an error return code only if this was not a "simple" decoding error.
            if (kVDADecoderDecoderFailedErr != status)
            {
                ret = GST_FLOW_ERROR;
            }
        }
    }

    // INLINE - gst_buffer_unref()
    gst_buffer_unref (buf);

    return ret;
}

// --------------------------------------------------------------------------
gboolean avcdecoder_plugin_init (GstPlugin * avcdecoder)
{
    /* debug category for filtering log messages
     *
     * exchange the string 'Template avcdecoder' with your description
     */
    GST_DEBUG_CATEGORY_INIT (avcdecoder_debug, "avcdecoder",
                             0, "Template avcdecoder"); // FIXME

    return gst_element_register (avcdecoder, "avcdecoder", 512, TYPE_AVCDECODER);
}
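
/*
 * A minimal sketch (an assumption, not part of this file) of how
 * avcdecoder_plugin_init() could be hooked into a standalone GStreamer 0.10
 * plugin; in practice the surrounding build may register it together with
 * other elements from a shared plugin entry point. The version, license,
 * package, and origin strings below are placeholders.
 *
 *     GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR,
 *                        "avcdecoder", "AVC video decoder element",
 *                        avcdecoder_plugin_init,
 *                        "1.0", "GPL", "avcdecoder", "http://example.org/")
 */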