RDK Documentation (Open Sourced RDK Components)
mediasourcepipeline.cpp
1 /*
2  * If not stated otherwise in this file or this component's Licenses.txt file the
3  * following copyright and licenses apply:
4  *
5  * Copyright 2017 RDK Management
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 
#include "mediasourcepipeline.h"
#include "GstMSESrc.h"

#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include <linux/input.h>
#include <unistd.h>

#include <algorithm>
#include <cmath>
#include <cstring>
#include <sstream>
#include <string>
#include <vector>
33 
// rtRemote registration: exposes this object and its suspend()/resume()
// methods so the pipeline can be controlled remotely.
rtDefineObject (MediaSourcePipeline, rtObject);
rtDefineMethod (MediaSourcePipeline, suspend);
rtDefineMethod (MediaSourcePipeline, resume);
37 
namespace {
// Artificial network latency added to video frame reads (ms).
const int kVideoReadDelayMs = 25;
// Artificial network latency added to audio frame reads (ms).
const int kAudioReadDelayMs = 10;
// Update interval for checking status, like playback position (ms).
const int kStatusDelayMs = 50;
// Delta time from the end of the current playback file at which a seek is
// triggered (seconds). 0 means only at/after the end.
const float kSeekEndDeltaSecs = 0.0f;
// Simulated chunk demuxer seek latency before a seek is completed (ms).
const int kChunkDemuxerSeekDelayMs = 50;
// Size of the history buffer of playback positions used to detect the end
// of a raw frame file (a stalled position).
const int kPlaybackPositionHistorySize = 10;
// Interval (ms) at which the playback position is printed to stdout.
const int64_t kPlaybackPositionUpdateIntervalMs = 1000;

} // namespace
57 
58 // #define DEBUG_PRINTS // define to get more verbose printing
59 
60 unsigned getGstPlayFlag(const char* nick)
61 {
62  static GFlagsClass* flagsClass = static_cast<GFlagsClass*>(g_type_class_ref(g_type_from_name("GstPlayFlags")));
63 
64  GFlagsValue* flag = g_flags_get_value_by_nick(flagsClass, nick);
65  if (!flag)
66  return 0;
67 
68  return flag->value;
69 }
70 
// Bus "message" signal trampoline: forwards to the instance handler.
static gboolean MessageCallbackStatic(GstBus*,
                                      GstMessage* message,
                                      MediaSourcePipeline* msp) {
  return msp->HandleMessage(message);
}
76 
// appsrc "need-data" signal trampoline.
static void StartFeedStatic(GstAppSrc* appsrc,
                            guint size,
                            MediaSourcePipeline* msp) {
  msp->StartFeedingAppSource(appsrc);
}
82 
// appsrc "enough-data" signal trampoline.
static void StopFeedStatic(GstAppSrc* appsrc, MediaSourcePipeline* msp) {
  msp->StopFeedingAppSource(appsrc);
}
86 
// appsrc "seek-data" signal trampoline; always reports success.
static gboolean SeekDataStatic(GstAppSrc* appsrc,
                               guint64 position,
                               MediaSourcePipeline* msp) {
  msp->SetNewAppSourceReadPosition(appsrc, position);
  return TRUE;
}
93 
// decodebin "pad-added" signal trampoline.
static void OnAutoPadAddedMediaSourceStatic(GstElement* decodebin2,
                                            GstPad* pad,
                                            MediaSourcePipeline* msp) {
  msp->OnAutoPadAddedMediaSource(decodebin2, pad);
}
99 
// bin "element-added" signal trampoline (bin argument unused).
static void OnAutoElementAddedMediaSourceStatic(GstBin* bin,
                                                GstElement* element,
                                                MediaSourcePipeline* msp) {
  msp->OnAutoElementAddedMediaSource(element);
}
105 
// GLib timeout trampoline for the video read loop.
static gboolean readVideoFrameStatic(MediaSourcePipeline* msp) {
  return msp->ReadVideoFrame();
}
109 
// GLib timeout trampoline for the audio read loop.
static gboolean readAudioFrameStatic(MediaSourcePipeline* msp) {
  return msp->ReadAudioFrame();
}
113 
// GLib timeout trampoline for the periodic status poll.
static gboolean StatusPollStatic(MediaSourcePipeline* msp) {
  return msp->StatusPoll();
}
117 
// GLib timeout trampoline for the simulated chunk-demuxer seek completion.
static gboolean ChunkDemuxerSeekStatic(MediaSourcePipeline* msp) {
  return msp->ChunkDemuxerSeek();
}
121 
122 static void sourceChangedCallback(GstElement* element, GstElement* source, gpointer data)
123 {
125  msp->sourceChanged();
126 }
127 
129 {
130  if (!source_)
131  g_object_get(pipeline_, "source", &source_, NULL);
132 
133  printf("sourceChanged!:%p\n",source_);
134 }
135 
/// Bus message handler: logs errors/warnings and EOS, and on pipeline state
/// changes dumps DOT graphs and prints the transitions.
/// @param message the bus message to handle
/// @return true so the bus watch stays installed
gboolean MediaSourcePipeline::HandleMessage(GstMessage* message) {
  GError* error;
  gchar* debug;
  switch (GST_MESSAGE_TYPE(message)){
  case GST_MESSAGE_ERROR:
    gst_message_parse_error(message, &error, &debug);
    printf("gstBusCallback() error! code: %d, %s, Debug: %s\n", error->code, error->message, debug);
    g_error_free(error);
    g_free(debug);
    // Dump the pipeline graph for post-mortem debugging.
    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL, "error-pipeline");
    break;
  case GST_MESSAGE_WARNING:
    gst_message_parse_warning(message, &error, &debug);
    printf("gstBusCallback() warning! code: %d, %s, Debug: %s\n", error->code, error->message, debug);
    g_error_free(error);
    g_free(debug);
    break;
  case GST_MESSAGE_EOS: {

    printf("Gstreamer EOS message received\n");
    break;
  }
  case GST_MESSAGE_STATE_CHANGED:
    // Declarations without initializers are legal after a case label.
    gchar* filename;
    GstState oldstate, newstate, pending;
    gst_message_parse_state_changed (message, &oldstate, &newstate, &pending);


    // Ignore messages not coming directly from the pipeline.
    if (GST_ELEMENT(GST_MESSAGE_SRC(message)) != pipeline_)
      break;

    // Dump a DOT graph named after the transition, e.g. "READY-PAUSED".
    filename = g_strdup_printf("%s-%s", gst_element_state_get_name(oldstate), gst_element_state_get_name(newstate));
    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL, filename);
    g_free(filename);

    // get the name and state
    if (GST_MESSAGE_SRC_NAME(message)){
      //printf("gstBusCallback() Got state message from %s\n", GST_MESSAGE_SRC_NAME (message));
    }
    printf("gstBusCallback() old_state %s, new_state %s, pending %s\n",
           gst_element_state_get_name (oldstate), gst_element_state_get_name (newstate), gst_element_state_get_name (pending));

    if (oldstate == GST_STATE_NULL && newstate == GST_STATE_READY) {
    } else if (oldstate == GST_STATE_READY && newstate == GST_STATE_PAUSED) {
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL, "paused-pipeline");
      printf("Ready to Paused finished!\n");
    } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_PAUSED) {
    } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_PLAYING) {
      printf("Pipeline is now in play state!\n");
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL, "playing-pipeline");
    } else if (oldstate == GST_STATE_PLAYING && newstate == GST_STATE_PAUSED) {
      printf("Pipline finished from play to pause\n");
    } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_READY) {
    } else if (oldstate == GST_STATE_READY && newstate == GST_STATE_NULL) {
    }
    break;
  default:
    break;
  }
  return true;
}
198 
199 bool MediaSourcePipeline::ShouldPerformSeek() {
200  float seek_delta_secs =
201  std::fabs(playback_position_secs_ - current_end_time_secs_);
202 
203  if (seek_delta_secs < kSeekEndDeltaSecs || playback_position_secs_ >= current_end_time_secs_)
204  return true;
205 
206  // Since it's possible that the playback position won't reach the time
207  // reported by the last possible frame timestamp, we will assume that we are
208  // done with a frame file if we are playing and the playback position stays
209  // the same for kPlaybackPositionHistorySize
210  if (is_playing_ && IsPlaybackStalled())
211  return true;
212 
213  return false;
214 }
215 
216 bool MediaSourcePipeline::IsPlaybackOver() {
217  // playback is over when there are no more files to play
218 
219  std::ostringstream counter_stream;
220  counter_stream << (current_file_counter_ + 1);
221 
222  std::string audio_timestamp_path =
223  frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
224  std::string video_timestamp_path =
225  frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";
226 
227  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
228  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");
229 
230  if (audio_timestamp_file == NULL && video_timestamp_file == NULL) {
231  return true;
232  } else {
233  if (audio_timestamp_file)
234  fclose(audio_timestamp_file);
235  if (video_timestamp_file)
236  fclose(video_timestamp_file);
237  return false;
238  }
239 }
240 
/// Walks the circular position-history buffer backwards from the most recent
/// sample and reports whether every older sample is strictly smaller, i.e.
/// the playback position advanced across the whole history window.
/// @return true when all recorded positions are strictly increasing and none
///         of the slots is still unset (-1)
bool MediaSourcePipeline::HasPlaybackAdvanced() {
  // current_playback_history_cnt_ points at the NEXT write slot, so the most
  // recent sample is one behind it (with wrap-around).
  int32_t pos = current_playback_history_cnt_ - 1;
  if (pos < 0)
    pos = kPlaybackPositionHistorySize - 1;

  int32_t counter = 1;
  int64_t val = playback_position_history_[pos];
  bool did_advance = true;
  while (counter < kPlaybackPositionHistorySize) {
    // Step to the previous (older) slot, wrapping at the front.
    pos--;
    if (pos < 0)
      pos = kPlaybackPositionHistorySize - 1;

    // An older sample that is >= the newer one, or an unset slot (-1),
    // means we cannot claim the position advanced.
    if (val <= playback_position_history_[pos] ||
        playback_position_history_[pos] == -1) {
      did_advance = false;
      break;
    }

    val = playback_position_history_[pos];
    ++counter;
  }

  return did_advance;
}
266 
267 void MediaSourcePipeline::AddPlaybackPositionToHistory(int64_t position) {
268  GstState state;
269  gst_element_get_state(pipeline_, &state, 0, 0);
270 
271  if (state == GST_STATE_PLAYING) {
272  playback_position_history_[current_playback_history_cnt_] = position;
273  current_playback_history_cnt_ =
274  (current_playback_history_cnt_ + 1) % kPlaybackPositionHistorySize;
275  }
276 }
277 
278 bool MediaSourcePipeline::IsPlaybackStalled() {
279  int64_t first_pos = playback_position_history_[0];
280 
281  for (size_t i = 1; i < playback_position_history_.size(); i++) {
282  if (playback_position_history_[i] != first_pos ||
283  playback_position_history_[i] < 0)
284  return false;
285  }
286 
287  return true; //playback_started_;
288 }
289 
/// One-shot finalization: once the msesrc source element exists and has not
/// been configured yet, registers the appsrc players with it (skipping the
/// stream type absent from the content), marks configuration done, and puts
/// the pipeline into PLAYING.
void MediaSourcePipeline::finishPipelineLinkingAndStartPlaybackIfNeeded()
{
  // gst_mse_src_configured() keeps this idempotent across repeated polls.
  if (source_ && !gst_mse_src_configured(source_)) {
    if(pipeline_type_ != kAudioOnly)
      gst_mse_src_register_player(source_, (GstElement*) appsrc_source_video_);
    if(pipeline_type_ != kVideoOnly)
      gst_mse_src_register_player(source_, (GstElement*) appsrc_source_audio_);

    gst_mse_src_configuration_done(source_);

    printf("Finished linking pipeline and putting it in play!\n");
    gst_element_set_state(pipeline_, GST_STATE_PLAYING);
    is_playing_ = true;
  }
}
305 
307  GstFormat fmt = GST_FORMAT_TIME;
308  gint64 position = -1;
309 
310  // finish linking all of pipeline if we haven't yet
311  finishPipelineLinkingAndStartPlaybackIfNeeded();
312 
313  if(pipeline_type_ != kAudioOnly) // westeros sink gives a more accurate position
314  gst_element_query_position(video_sink_, fmt, &position);
315  else
316  gst_element_query_position(pipeline_, fmt, &position);
317 
318  position += (seek_offset_ * 1000);
319  if (position != static_cast<gint64>(GST_CLOCK_TIME_NONE)) {
320  AddPlaybackPositionToHistory(position);
321  if (!playback_started_)
322  playback_started_ = HasPlaybackAdvanced();
323  playback_position_secs_ = (static_cast<double>(position) / GST_SECOND);
324 
325  static int64_t position_update_cnt = 0;
326  if (position_update_cnt == 0)
327  printf("playback position: %f secs\n", playback_position_secs_);
328 
329  position_update_cnt = (position_update_cnt + kStatusDelayMs) %
330  kPlaybackPositionUpdateIntervalMs;
331  }
332 
333 #ifdef DEBUG_PRINTS
334  printf("playback started:%d\n", playback_started_);
335 #endif
336  if (ShouldPerformSeek()) {
337  if (IsPlaybackOver()) {
338  printf("Current end time:%f\n", current_end_time_secs_);
339  printf("Playback Complete! Starting over...\n");
340 
341  // reset file counter back to before beginning
342  current_file_counter_ = -1;
343  PerformSeek();
344  } else {
345  printf("Performing Seek!\n");
346  PerformSeek();
347  }
348  }
349 
350  return TRUE;
351 }
352 
354  if (seeking_) {
355  video_frame_timeout_handle_ = 0;
356  return FALSE;
357  }
358 
359  AVFrame video_frame;
360  ReadStatus read_status = GetNextFrame(&video_frame, kVideo);
361 #ifdef DEBUG_PRINTS
362  printf("Video frame read status:%d\n", read_status);
363 #endif
364 
365  if (read_status != kFrameRead) {
366  video_frame_timeout_handle_ = 0;
367  return FALSE;
368  }
369 
370 #ifdef DEBUG_PRINTS
371  float frame_time_seconds = video_frame.timestamp_us_ / 1000000.0f;
372  printf("read video frame: time:%f secs, size:%d bytes\n",
373  frame_time_seconds,
374  video_frame.size_);
375 #endif
376 
377  PushFrameToAppSrc(video_frame, kVideo);
378 
379  return TRUE;
380 }
381 
383  if (seeking_) {
384  audio_frame_timeout_handle_ = 0;
385  return FALSE;
386  }
387 
388  AVFrame audio_frame;
389  ReadStatus read_status = GetNextFrame(&audio_frame, kAudio);
390 
391 #ifdef DEBUG_PRINTS
392  printf("Audio frame read status:%d\n", read_status);
393 #endif
394 
395  if (read_status != kFrameRead) {
396  audio_frame_timeout_handle_ = 0;
397  return FALSE;
398  }
399 
400 #ifdef DEBUG_PRINTS
401  float frame_time_seconds = audio_frame.timestamp_us_ / 1000000.0f;
402  printf("read audio frame: time:%f secs, size:%d bytes\n",
403  frame_time_seconds,
404  audio_frame.size_);
405 #endif
406 
407  PushFrameToAppSrc(audio_frame, kAudio);
408 
409  return TRUE;
410 }
411 
413  if (seeking_)
414  return;
415 
416  bool start_up_reading_again = false;
417 
418  start_up_reading_again =
419  !ShouldBeReading(p_src == appsrc_source_video_ ? kVideo : kAudio);
420  if (start_up_reading_again)
421  SetShouldBeReading(true, p_src == appsrc_source_video_ ? kVideo : kAudio);
422 
423  if (start_up_reading_again) {
424  if (p_src == appsrc_source_video_) {
425  video_frame_timeout_handle_ =
426  g_timeout_add(kVideoReadDelayMs,
427  reinterpret_cast<GSourceFunc>(readVideoFrameStatic),
428  this);
429  } else { // audio
430  audio_frame_timeout_handle_ =
431  g_timeout_add(kAudioReadDelayMs,
432  reinterpret_cast<GSourceFunc>(readAudioFrameStatic),
433  this);
434  }
435  }
436 }
437 
439  if (p_src == appsrc_source_video_) {
440  if (video_frame_timeout_handle_) {
441  g_source_remove(video_frame_timeout_handle_);
442  video_frame_timeout_handle_ = 0;
443  }
444  SetShouldBeReading(false, kVideo);
445  } else {
446  if (audio_frame_timeout_handle_) {
447  g_source_remove(audio_frame_timeout_handle_);
448  audio_frame_timeout_handle_ = 0;
449  }
450  SetShouldBeReading(false, kAudio);
451  }
452 }
453 
455  guint64 position) {}
456 
458  GstPad* pad) {
459  GstCaps* caps;
460  GstStructure* structure;
461  const gchar* name;
462  std::vector<GstElement*>::iterator src_it;
463  std::vector<GstElement*>::iterator dest_it;
464 
465  caps = gst_pad_query_caps(pad,NULL);
466  structure = gst_caps_get_structure(caps, 0);
467  name = gst_structure_get_name(structure);
468 
469  // find the element in the pipeline that created the pad, then link it with
470  // its neighbor
471  if (g_strrstr(name, "video")) {
472  src_it = std::find(
473  ms_video_pipeline_.begin(), ms_video_pipeline_.end(), element);
474  dest_it = src_it + 1;
475  if (src_it != ms_video_pipeline_.end() &&
476  dest_it != ms_video_pipeline_.end()) {
477  if (gst_element_link(*src_it, *dest_it) == FALSE) {
478  g_print("Couldn't link auto pad added elements in video pipeline\n");
479  }
480  }
481  } else if (g_strrstr(name, "audio")) {
482  src_it = std::find(
483  ms_audio_pipeline_.begin(), ms_audio_pipeline_.end(), element);
484  dest_it = src_it + 1;
485  if (src_it != ms_audio_pipeline_.end() &&
486  dest_it != ms_audio_pipeline_.end()) {
487  if (gst_element_link(*src_it, *dest_it) == FALSE) {
488  g_print("Couldn't link auto pad added elements in audio pipeline\n");
489  }
490  }
491  }
492 
493  gst_caps_unref(caps);
494 }
495 
497  // check for the dynamically adding of the real audio sink
498  if (g_strrstr(GST_ELEMENT_NAME(element), "audio") &&
499  g_strrstr(GST_ELEMENT_NAME(element), "sink")) {
500  audio_sink_ = element;
501  }
502 }
503 
/// Constructs the pipeline object.
/// @param frame_files_path directory containing the raw_{audio,video}_frames_N
///        .bin/.txt file pairs to play
MediaSourcePipeline::MediaSourcePipeline(std::string frame_files_path)
  : frame_files_path_(frame_files_path)
{
  Init();
}
509 
// Tears down the gstreamer pipeline and closes any open frame files.
MediaSourcePipeline::~MediaSourcePipeline() { Destroy(); }
511 
/// Resets every member to its pre-Build() default. Called from the
/// constructor and again from resume() before re-creating the pipeline.
void MediaSourcePipeline::Init()
{
  current_file_counter_ = 0;
  current_video_file_ = NULL;
  current_video_timestamp_file_ = NULL;
  current_audio_file_ = NULL;
  current_audio_timestamp_file_ = NULL;
  seeking_ = false;
  pipeline_ = NULL;
  appsrc_source_video_ = NULL;
  appsrc_source_audio_ = NULL;
  video_sink_ = NULL;
  audio_sink_ = NULL;
  playback_position_secs_ = 0;
  current_end_time_secs_ = 0;
  video_frame_timeout_handle_ = 0;
  audio_frame_timeout_handle_ = 0;
  status_timeout_handle_ = 0;
  current_playback_history_cnt_ = 0;
  playback_started_ = false;
  is_playing_ = false;
  // Assume both streams until CalculateCurrentEndTime() inspects the files.
  pipeline_type_ = kAudioVideo;
  source_ = NULL;
  appsrc_caps_video_ = NULL;
  appsrc_caps_audio_ = NULL;
  pause_before_seek_ = false;
  is_active_ = true;
  seek_offset_ = 0;


  memset(&should_be_reading_, 0, sizeof(should_be_reading_));

  // History slots are set to -1 ("unset") by ResetPlaybackHistory().
  playback_position_history_.resize(kPlaybackPositionHistorySize, 0);
  ResetPlaybackHistory();
}
547 
/// @return whether the read loop for the given stream type is active.
bool MediaSourcePipeline::ShouldBeReading(AVType av) {
  return should_be_reading_[av];
}
551 
/// Marks the read loop for the given stream type active or inactive.
void MediaSourcePipeline::SetShouldBeReading(bool is_reading, AVType av) {
  should_be_reading_[av] = is_reading;
}
555 
/// Wraps a raw frame in a GstSample (frame data + stream caps) and pushes it
/// to the matching appsrc. The buffer takes ownership of frame.data_.
/// @param frame frame payload, byte size and timestamp (microseconds)
/// @param type kVideo or kAudio, selects the target appsrc and caps
/// @return true on GST_FLOW_OK, false otherwise
bool MediaSourcePipeline::PushFrameToAppSrc(const AVFrame& frame, AVType type) {
  GstFlowReturn ret = GST_FLOW_OK;

  GstBuffer* gst_buffer = gst_buffer_new_wrapped(frame.data_, frame.size_);
  GstSample* sample = NULL;
  // Timestamps are rebased to the current seek offset; us -> ns.
  GST_BUFFER_TIMESTAMP(gst_buffer) = (frame.timestamp_us_ - seek_offset_) * 1000;

  if (type == kVideo)
  {
    sample = gst_sample_new(gst_buffer, appsrc_caps_video_, NULL, NULL);
    ret = gst_app_src_push_sample(GST_APP_SRC(appsrc_source_video_), sample);
  }
  else // kAudio
  {
    sample = gst_sample_new(gst_buffer, appsrc_caps_audio_, NULL, NULL);
    ret = gst_app_src_push_sample(GST_APP_SRC(appsrc_source_audio_), sample);
  }

  // push_sample keeps its own references; drop ours.
  gst_buffer_unref(gst_buffer);
  gst_sample_unref(sample);

  if (ret != GST_FLOW_OK) {
    fprintf(stderr, "APPSRC PUSH FAILED!\n");
    return false;
  }

  return true;
}
584 
585 int64_t MediaSourcePipeline::GetCurrentStartTimeMicroseconds() const {
586  std::ostringstream counter_stream;
587  counter_stream << current_file_counter_;
588 
589  std::string audio_timestamp_path =
590  frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
591  std::string video_timestamp_path =
592  frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";
593 
594  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
595  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");
596 
597  int64_t smallest_time_ms = -1;
598 
599  int64_t timestamp_us;
600  int32_t frame_size;
601  if (audio_timestamp_file) {
602  if (fscanf(audio_timestamp_file,
603  "%" PRId64 ",%d,",
604  &timestamp_us,
605  &frame_size) == 2) {
606  smallest_time_ms = timestamp_us;
607  }
608  fclose(audio_timestamp_file);
609  }
610 
611  if (video_timestamp_file) {
612  if (fscanf(video_timestamp_file,
613  "%" PRId64 ",%d,",
614  &timestamp_us,
615  &frame_size) == 2) {
616  if (smallest_time_ms == -1)
617  smallest_time_ms = timestamp_us;
618  else
619  smallest_time_ms = std::min(smallest_time_ms, timestamp_us);
620  }
621  fclose(video_timestamp_file);
622  }
623 
624  return smallest_time_ms;
625 }
626 
/// Scans the current frame-file pair to find the playable end time and, as a
/// side effect, classifies the pipeline type (audio-only, video-only, or
/// both) based on which files exist. When both streams exist the end time is
/// the SMALLER of the two last timestamps, so playback stops when the first
/// stream runs out.
void MediaSourcePipeline::CalculateCurrentEndTime() {
  std::ostringstream counter_stream;
  counter_stream << current_file_counter_;
  current_end_time_secs_ = 0;
  bool have_audio = false;
  bool have_video = false;

  std::string audio_timestamp_path =
      frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
  std::string video_timestamp_path =
      frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";

  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");

  float greatest_time_secs = 0;
  float greatest_video_time_secs = 0;
  float greatest_audio_time_secs = 0;

  int64_t timestamp_us;
  int32_t frame_size;
  // Scan every "<timestamp_us>,<size>," entry, tracking the largest
  // timestamp per stream (entries are not assumed to be sorted).
  if (audio_timestamp_file) {
    while (fscanf(audio_timestamp_file,
                  "%" PRId64 ",%d,",
                  &timestamp_us,
                  &frame_size) == 2) {
      float timestamp_secs = timestamp_us / 1000000.0f;
      if (greatest_audio_time_secs < timestamp_secs)
        greatest_audio_time_secs = timestamp_secs;
    }
    fclose(audio_timestamp_file);
    have_audio = true;
  }

  if (video_timestamp_file) {
    while (fscanf(video_timestamp_file,
                  "%" PRId64 ",%d,",
                  &timestamp_us,
                  &frame_size) == 2) {
      float timestamp_secs = timestamp_us / 1000000.0f;
      if (greatest_video_time_secs < timestamp_secs)
        greatest_video_time_secs = timestamp_secs;
    }
    fclose(video_timestamp_file);
    have_video = true;
  }

  // Both streams: end when the shorter one ends; otherwise whichever exists.
  if (greatest_audio_time_secs > 0 && greatest_video_time_secs > 0)
    greatest_time_secs =
        std::min(greatest_audio_time_secs, greatest_video_time_secs);
  else if (greatest_audio_time_secs > 0)
    greatest_time_secs = greatest_audio_time_secs;
  else if (greatest_video_time_secs > 0)
    greatest_time_secs = greatest_video_time_secs;

#ifdef DEBUG_PRINTS
  printf("calculated end time, counter:%d, end time:%f\n",
         current_file_counter_,
         greatest_time_secs);
  printf("greatest audio time:%f, greatest video time:%f\n",
         greatest_audio_time_secs,
         greatest_video_time_secs);
#endif

  if(have_audio && have_video)
    pipeline_type_ = kAudioVideo;
  else if(have_audio)
    pipeline_type_ = kAudioOnly;
  else if(have_video)
    pipeline_type_ = kVideoOnly;

  if (greatest_time_secs > 0)
    current_end_time_secs_ = greatest_time_secs;
}
701 
702 void MediaSourcePipeline::ResetPlaybackHistory() {
703  for (int i = 0; i < kPlaybackPositionHistorySize; i++)
704  playback_position_history_[i] = -1;
705 }
706 
/// Advances to the next frame-file pair and "seeks" the pipeline there.
/// Stops both read loops, closes the current files, recomputes the end
/// time/pipeline type, then flushes the msesrc (flush-start/flush-stop in
/// place of a real gst_element_seek, kept commented below) and schedules the
/// simulated chunk-demuxer seek completion which restarts reading.
void MediaSourcePipeline::PerformSeek() {
  // put us in a seeking state and stop any reading of the current file(s)
  seeking_ = true;
  playback_started_ = false;
  bool did_pause = false;
  ResetPlaybackHistory();
  StopFeedingAppSource(appsrc_source_video_);
  StopFeedingAppSource(appsrc_source_audio_);
  CloseAllFiles();

  // go to the next file(s), and calculate the end time of the file av segment
  current_file_counter_++;
  CalculateCurrentEndTime();

  // Optionally pause around the flush (off by default; pause_before_seek_).
  if(pause_before_seek_) {
    if (is_playing_) {
      did_pause = true;
      is_playing_ = false;
      DoPause();
    }
  }

  // have gstreamer perform a seek
  gboolean seek_succeeded = FALSE;

  // New frames are timestamped relative to this offset (microseconds).
  int64_t seek_time_us = GetCurrentStartTimeMicroseconds();
  seek_offset_ = seek_time_us;
  GstClockTime seek_time_ns =
      seek_time_us * 1000; // GstClockTime is a time in nanoseconds

/*
  seek_succeeded = gst_element_seek(
      pipeline_,
      1.0,
      GST_FORMAT_TIME,
      (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
      GST_SEEK_TYPE_SET,
      seek_time_ns,
      GST_SEEK_TYPE_NONE,
      GST_CLOCK_TIME_NONE);
  */

  // A seek is just a flush of the pipeline
  seek_succeeded = gst_element_send_event(source_, gst_event_new_flush_start());
  if (!seek_succeeded)
    printf("failed to send flush-start event\n");

  seek_succeeded = gst_element_send_event(source_, gst_event_new_flush_stop(TRUE));
  if (!seek_succeeded)
    printf("failed to send flush-stop event\n");

  if (!seek_succeeded) {
    printf("Failed to seek!\n");
  } else {
    // if here gstreamer successfully seeked, now we need to simulate the
    // mse source performing its own seek before we can
    // starting reading data again

    g_timeout_add(kChunkDemuxerSeekDelayMs,
                  reinterpret_cast<GSourceFunc>(ChunkDemuxerSeekStatic),
                  this);
  }

  if(pause_before_seek_) {
    if (did_pause) {
      is_playing_ = true;
      DoPause();
    }
  }
}
777 
779  seeking_ = false;
780 
781  StartFeedingAppSource(appsrc_source_video_);
782  StartFeedingAppSource(appsrc_source_audio_);
783 
784  return FALSE;
785 }
786 
/// Applies the current is_playing_ flag to the pipeline: PLAYING when true,
/// PAUSED when false. Callers toggle is_playing_ first, then call this.
void MediaSourcePipeline::DoPause() {
  g_print ("Setting state to %s\n", is_playing_ ? "PLAYING" : "PAUSE");
  gst_element_set_state (pipeline_, is_playing_? GST_STATE_PLAYING : GST_STATE_PAUSED);
}
791 
792 void MediaSourcePipeline::CloseAllFiles() {
793  if (current_video_file_)
794  fclose(current_video_file_);
795  if (current_video_timestamp_file_)
796  fclose(current_video_timestamp_file_);
797  if (current_audio_file_)
798  fclose(current_audio_file_);
799  if (current_audio_timestamp_file_)
800  fclose(current_audio_timestamp_file_);
801 
802  current_video_file_ = current_video_timestamp_file_ = current_audio_file_ =
803  current_audio_timestamp_file_ = NULL;
804 }
805 
/// Reads the next frame of the requested type from the current frame-file
/// pair, lazily opening the .bin (payload) and .txt (timestamp,size index)
/// files on first use.
/// @param frame out: receives timestamp_us_, size_ and a g_malloc'd data_
///        buffer on success (caller/appsrc owns the buffer)
/// @param type kAudio or kVideo
/// @return kFrameRead on success, kDone when the files don't exist,
///         kPerformSeek when either file is exhausted
ReadStatus MediaSourcePipeline::GetNextFrame(AVFrame* frame, AVType type) {
  FILE* current_file = NULL;
  FILE* current_timestamp_file = NULL;
  std::ostringstream counter_stream;
  counter_stream << current_file_counter_;

  if (type == kAudio) {
    if (current_audio_file_ == NULL) {
      std::string audio_path = frame_files_path_ + "/raw_audio_frames_" +
                               counter_stream.str() + ".bin";
      std::string audio_timestamp_path = frame_files_path_ +
                                         "/raw_audio_frames_" +
                                         counter_stream.str() + ".txt";
      current_audio_file_ = fopen(audio_path.c_str(), "rb");
      current_audio_timestamp_file_ = fopen(audio_timestamp_path.c_str(), "r");
    }
    current_file = current_audio_file_;
    current_timestamp_file = current_audio_timestamp_file_;
  } else { // video
    if (current_video_file_ == NULL) {
      std::string video_path = frame_files_path_ + "/raw_video_frames_" +
                               counter_stream.str() + ".bin";
      std::string video_timestamp_path = frame_files_path_ +
                                         "/raw_video_frames_" +
                                         counter_stream.str() + ".txt";
      current_video_file_ = fopen(video_path.c_str(), "rb");
      current_video_timestamp_file_ = fopen(video_timestamp_path.c_str(), "r");
    }
    current_file = current_video_file_;
    current_timestamp_file = current_video_timestamp_file_;
  }

  if (current_file == NULL || current_timestamp_file == NULL)
    return kDone;

  // read data from both the timestamp file and the raw frames file
  // if we fail reading either assume we are at the end of the file
  // and we need to peform a seek (aka read the next timestamp/datafile)
  int ret = fscanf(current_timestamp_file,
                   "%" PRId64 ",%d,",
                   &frame->timestamp_us_,
                   &frame->size_);
  if (ret != 2)
    return kPerformSeek;

  frame->data_ = static_cast<guint8*>(g_malloc(frame->size_));
  ret = fread(frame->data_, 1, frame->size_, current_file);
  if (ret != frame->size_) {
    // Short read: free the partial buffer before signaling end-of-file.
    g_free(frame->data_);
    return kPerformSeek;
  }

  // if we make it here, we have succesfully read a frame
  return kFrameRead;
}
861 
/// Builds the playbin-based pipeline: creates the two appsrc elements with
/// seekable/time-format configuration and fixed caps, registers the custom
/// "msesrc" source factory, creates playbin with a westerossink video sink,
/// and installs the bus watch.
/// @return true (failures currently surface via gstreamer logs, not here)
bool MediaSourcePipeline::Build()
{
  source_ = NULL;
  appsrc_caps_video_ = NULL;
  appsrc_caps_audio_ = NULL;
  appsrc_source_video_ = (GstAppSrc*) gst_element_factory_make("appsrc", NULL);
  appsrc_source_audio_ = (GstAppSrc*) gst_element_factory_make("appsrc", NULL);

  // Flow-control signals drive the frame read loops.
  g_signal_connect(
      appsrc_source_video_, "need-data", G_CALLBACK(StartFeedStatic), this);
  g_signal_connect(
      appsrc_source_video_, "enough-data", G_CALLBACK(StopFeedStatic), this);


  g_signal_connect(
      appsrc_source_audio_, "need-data", G_CALLBACK(StartFeedStatic), this);
  g_signal_connect(
      appsrc_source_audio_, "enough-data", G_CALLBACK(StopFeedStatic), this);

  g_object_set(G_OBJECT(appsrc_source_video_),
               "stream-type",
               GST_APP_STREAM_TYPE_SEEKABLE,
               "format",
               GST_FORMAT_TIME,
               NULL);

  g_object_set(G_OBJECT(appsrc_source_audio_),
               "stream-type",
               GST_APP_STREAM_TYPE_SEEKABLE,
               "format",
               GST_FORMAT_TIME,
               NULL);

  g_signal_connect(
      appsrc_source_video_, "seek-data", G_CALLBACK(SeekDataStatic), this);

  g_signal_connect(
      appsrc_source_audio_, "seek-data", G_CALLBACK(SeekDataStatic), this);

  //gchar* caps_string_video = g_strdup_printf("video/x-h264, alignment=(string)au, stream-format=(string)byte-stream");
  //gchar* caps_string_audio = g_strdup_printf("audio/mpeg, mpegversion=4");

  // original video caps
  //gchar* caps_string_video = g_strdup_printf("video/x-h264, stream-format=(string)avc, alignment=(string)au, level=(string)3.1, profile=(string)main, codec_data=(buffer)014d401fffe1001c674d401fe8802802dd80b501010140000003004000000c03c60c448001000468ebef20, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)100000/4201");
  //gchar* caps_string_audio = g_strdup_printf("audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)1210, rate=(int)44100, channels=(int)2");

  // rdk video caps
  gchar* caps_string_video = g_strdup_printf("video/x-h264, stream-format=(string)avc, alignment=(string)au, level=(string)3.1, profile=(string)main, codec_data=(buffer)014d401fffe1001b674d401fe8802802dd80b5010101400000fa40003a9803c60c448001000468ebaf20, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)100000/3357");
  gchar* caps_string_audio = g_strdup_printf("audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)1210, rate=(int)44100, channels=(int)2");

  appsrc_caps_video_ = gst_caps_from_string(caps_string_video);
  appsrc_caps_audio_ = gst_caps_from_string(caps_string_audio);
  g_free(caps_string_video);
  g_free(caps_string_audio);

  // Register the custom mse source at high rank so playbin picks it for the
  // "mse://" URI; register only once per process.
  GstElementFactory* src_factory = gst_element_factory_find("msesrc");
  if (!src_factory) {
    gst_element_register(0, "msesrc", GST_RANK_PRIMARY + 100, GST_MSE_TYPE_SRC);
  } else {
    gst_object_unref(src_factory);
  }

  pipeline_ = gst_element_factory_make("playbin", NULL);
  g_signal_connect(pipeline_, "source-setup", G_CALLBACK(sourceChangedCallback), this);

  // make westeros sink our video sink
  video_sink_ = gst_element_factory_make("westerossink", "vsink");
  //g_object_set(G_OBJECT(video_sink_), "sync", 1, NULL );
  g_object_set(G_OBJECT(pipeline_), "video-sink", video_sink_, NULL );
  /* Secure video path - SVP is available for Broadcom platform 16.2 and above */
  if( g_object_class_find_property( G_OBJECT_GET_CLASS( video_sink_ ), "secure-video" ) )
  {
    g_object_set( G_OBJECT( video_sink_ ), "secure-video", true, NULL );
  }

  unsigned flagAudio = getGstPlayFlag("audio");
  unsigned flagVideo = getGstPlayFlag("video");
  unsigned flagNativeVideo = getGstPlayFlag("native-video");
  unsigned flagBuffering = getGstPlayFlag("buffering");

  g_object_set(pipeline_, "uri", "mse://", "flags", flagAudio | flagVideo | flagNativeVideo | flagBuffering, NULL);

  GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
  gst_bus_add_signal_watch(bus);
  g_signal_connect(bus, "message", G_CALLBACK(MessageCallbackStatic), this);
  gst_object_unref(bus);

  return true;
}
951 
952 void MediaSourcePipeline::StopAllTimeouts()
953 {
954  seeking_ = true;
955  g_source_remove(status_timeout_handle_);
956  StopFeedingAppSource(appsrc_source_video_);
957  StopFeedingAppSource(appsrc_source_audio_);
958 }
959 
/// Full teardown: stops all timeouts, closes files, drives the pipeline to
/// NULL state (polling until the transition completes), and releases every
/// gstreamer reference. Safe to call when the pipeline was never built.
void MediaSourcePipeline::Destroy() {
  StopAllTimeouts();
  CloseAllFiles();

  if (pipeline_) {
    GstState state;
    GstState pending;

    // clear out the pipeline
    gst_element_set_state (pipeline_, GST_STATE_NULL);
    gst_element_get_state (pipeline_,&state,&pending,GST_CLOCK_TIME_NONE);
    // Poll until the NULL state is actually reached before unreffing.
    while (state!=GST_STATE_NULL) {
      gst_element_get_state (pipeline_,&state,&pending,GST_CLOCK_TIME_NONE);
      usleep(10000);
    }
    gst_object_unref(GST_OBJECT(pipeline_));

    if (source_)
      gst_object_unref(source_);
    if (appsrc_caps_video_)
      gst_caps_unref(appsrc_caps_video_);
    if (appsrc_caps_audio_)
      gst_caps_unref(appsrc_caps_audio_);

    pipeline_ = NULL;
    appsrc_source_video_ = NULL;
    appsrc_source_audio_ = NULL;
    video_sink_ = NULL;
    audio_sink_ = NULL;
    source_ = NULL;
    appsrc_caps_video_ = NULL;
    appsrc_caps_audio_ = NULL;

    printf("Pipeline Destroyed\n");
  }
}
996 
/// Computes the first segment's end time, builds the pipeline, pre-rolls it
/// (PAUSED), and starts the periodic status poll that drives playback.
/// @return false when the pipeline could not be built, true otherwise
bool MediaSourcePipeline::Start() {
  CalculateCurrentEndTime();

  if (!Build()) {
    fprintf(stderr, "Failed to build gstreamer pipeline\n");
    return false;
  }

  printf("Current end time:%f secs\n", current_end_time_secs_);

  printf("Pausing pipeline!\n");
  gst_element_set_state(pipeline_, GST_STATE_PAUSED);

  // The status poll transitions to PLAYING once linking completes.
  status_timeout_handle_ = g_timeout_add(
      kStatusDelayMs, reinterpret_cast<GSourceFunc>(StatusPollStatic), this);

  return true;
}
1015 
1016 void MediaSourcePipeline::HandleKeyboardInput(unsigned int key) {
1017 
1018  switch (key) {
1019  case KEY_P: // pause/play
1020  is_playing_ = !is_playing_;
1021  DoPause();
1022  break;
1023  default:
1024  break;
1025  }
1026 }
1027 
1028 rtError MediaSourcePipeline::suspend()
1029 {
1030  if(is_active_)
1031  {
1032  printf("MediaSourcePipeline is going to suspend\n");
1033  // Destroy gstreamer pipeline to free all AV resources
1034  Destroy();
1035  is_active_ = false;
1036  }
1037  return RT_OK;
1038 }
1039 
1040 rtError MediaSourcePipeline::resume()
1041 {
1042  if(!is_active_)
1043  {
1044  printf("MediaSourcePipeline is going to resume\n");
1045  // Re-create the pipeline and start playing again
1046  Init();
1047  Start();
1048  is_active_ = true;
1049  }
1050  return RT_OK;
1051 }
MediaSourcePipeline::sourceChanged
void sourceChanged()
This signal callback is called when "source-setup" is emitted.
Definition: mediasourcepipeline.cpp:128
MediaSourcePipeline::SetNewAppSourceReadPosition
void SetNewAppSourceReadPosition(GstAppSrc *p_src, guint64 position)
This API seeks the app source to the given position and updates its read position.
Definition: mediasourcepipeline.cpp:454
MediaSourcePipeline::OnAutoPadAddedMediaSource
void OnAutoPadAddedMediaSource(GstElement *element, GstPad *pad)
This API links auto pad added elements in audio pipeline or video pipeline depending upon the element...
Definition: mediasourcepipeline.cpp:457
MediaSourcePipeline::ChunkDemuxerSeek
gboolean ChunkDemuxerSeek()
This API is used by the mse source to perform its own seek before starting the read data again.
Definition: mediasourcepipeline.cpp:778
MediaSourcePipeline::StatusPoll
gboolean StatusPoll()
This API queries the playback position.
Definition: mediasourcepipeline.cpp:306
MediaSourcePipeline::HandleMessage
gboolean HandleMessage(GstMessage *message)
This API handles different message types.
Definition: mediasourcepipeline.cpp:136
MediaSourcePipeline::OnAutoElementAddedMediaSource
void OnAutoElementAddedMediaSource(GstElement *element)
This API checks for the dynamic addition of real audio sink.
Definition: mediasourcepipeline.cpp:496
MediaSourcePipeline::StopFeedingAppSource
void StopFeedingAppSource(GstAppSrc *p_src)
This signal callback is called when appsrc have enough-data.
Definition: mediasourcepipeline.cpp:438
AVFrame
Definition: mediasourcepipeline.h:45
MediaSourcePipeline::ReadVideoFrame
gboolean ReadVideoFrame()
This API reads data from both the audio/video timestamp file and the raw frames file and pushes the details to...
Definition: mediasourcepipeline.cpp:353
MediaSourcePipeline::StartFeedingAppSource
void StartFeedingAppSource(GstAppSrc *p_src)
This signal callback is called when appsrc needs data.
Definition: mediasourcepipeline.cpp:412
MediaSourcePipeline::ReadAudioFrame
gboolean ReadAudioFrame()
This API reads the audio frame details and pushes the details to appsrc.
Definition: mediasourcepipeline.cpp:382
MediaSourcePipeline
Definition: mediasourcepipeline.h:51
TRUE
#define TRUE
Defines for TRUE/FALSE/ENABLE flags.
Definition: wifi_common_hal.h:199