#include "mediasourcepipeline.h"
#include "GstMSESrc.h"

#define __STDC_FORMAT_MACROS
#include <inttypes.h>

#include <linux/input.h>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <sstream>
#include <string>
#include <vector>
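// Playback tuning constants: the two read delays pace the appsrc feeder
// timers, kStatusDelayMs is the status-poll period, kSeekEndDeltaSecs is how
// close playback must get to the computed end time before a seek is
// considered, kChunkDemuxerSeekDelayMs defers the demuxer-side seek, and the
// last two size and throttle the position-history ring buffer.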
const int kVideoReadDelayMs =
const int kAudioReadDelayMs =
const int kStatusDelayMs =
const float kSeekEndDeltaSecs =
const int kChunkDemuxerSeekDelayMs =
const int kPlaybackPositionHistorySize =
const int64_t kPlaybackPositionUpdateIntervalMs =
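// Resolves a playbin GstPlayFlags value (e.g. "audio", "native-video") from
// its registered nickname via GLib type introspection.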
unsigned getGstPlayFlag(const char* nick)
{
  static GFlagsClass* flagsClass = static_cast<GFlagsClass*>(
      g_type_class_ref(g_type_from_name("GstPlayFlags")));

  GFlagsValue* flag = g_flags_get_value_by_nick(flagsClass, nick);
  if (!flag)  // assumption: unknown nicknames contribute no flag bits
    return 0;

  return flag->value;
}
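// Static GStreamer/GLib callback hooks. Each receives the owning
// MediaSourcePipeline through its gpointer user-data argument (connected
// with `this` in Build()).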
static gboolean MessageCallbackStatic(GstBus*, GstMessage* message, gpointer data);
static void StartFeedStatic(GstAppSrc* appsrc, guint length, gpointer data);
static gboolean SeekDataStatic(GstAppSrc* appsrc, guint64 position, gpointer data);
static void OnAutoPadAddedMediaSourceStatic(GstElement* decodebin2, GstPad* pad, gpointer data);
static void OnAutoElementAddedMediaSourceStatic(GstBin* bin, GstElement* element, gpointer data);
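// Additional hooks referenced later in this file; parameter lists are
// assumptions (each is cast to GSourceFunc where it is scheduled).
static void StopFeedStatic(GstAppSrc* appsrc, gpointer data);
static gboolean readVideoFrameStatic(MediaSourcePipeline* pipeline);
static gboolean readAudioFrameStatic(MediaSourcePipeline* pipeline);
static gboolean StatusPollStatic(MediaSourcePipeline* pipeline);
static gboolean ChunkDemuxerSeekStatic(MediaSourcePipeline* pipeline);

// playbin "source-setup" handler: caches the msesrc element that playbin
// instantiates for the mse:// URI.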
static void sourceChangedCallback(GstElement* element, GstElement* source, gpointer data)
{
  MediaSourcePipeline* self = reinterpret_cast<MediaSourcePipeline*>(data);  // assumption
  g_object_get(self->pipeline_, "source", &self->source_, NULL);
  printf("sourceChanged!:%p\n", self->source_);
}
gboolean MediaSourcePipeline::gstBusCallback(GstBus* bus, GstMessage* message)
{
  GError* error = NULL;
  gchar* debug = NULL;

  switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error(message, &error, &debug);
      printf("gstBusCallback() error! code: %d, %s, Debug: %s\n",
             error->code, error->message, debug);
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL,
                                        "error-pipeline");
      g_error_free(error);
      g_free(debug);
      break;

    case GST_MESSAGE_WARNING:
      gst_message_parse_warning(message, &error, &debug);
      printf("gstBusCallback() warning! code: %d, %s, Debug: %s\n",
             error->code, error->message, debug);
      g_error_free(error);
      g_free(debug);
      break;

    case GST_MESSAGE_EOS: {
      printf("GStreamer EOS message received\n");
      break;
    }

    case GST_MESSAGE_STATE_CHANGED:
    {
      GstState oldstate, newstate, pending;
      gst_message_parse_state_changed(message, &oldstate, &newstate, &pending);

      // Only the pipeline's own transitions are of interest here.
      if (GST_ELEMENT(GST_MESSAGE_SRC(message)) != pipeline_)
        break;

      gchar* filename = g_strdup_printf("%s-%s",
          gst_element_state_get_name(oldstate), gst_element_state_get_name(newstate));
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL, filename);
      g_free(filename);

      if (GST_MESSAGE_SRC_NAME(message)) {
        printf("gstBusCallback() old_state %s, new_state %s, pending %s\n",
               gst_element_state_get_name(oldstate),
               gst_element_state_get_name(newstate),
               gst_element_state_get_name(pending));
      }

      if (oldstate == GST_STATE_NULL && newstate == GST_STATE_READY) {
      } else if (oldstate == GST_STATE_READY && newstate == GST_STATE_PAUSED) {
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL,
                                          "paused-pipeline");
        printf("Ready to Paused finished!\n");
      } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_PAUSED) {
      } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_PLAYING) {
        printf("Pipeline is now in play state!\n");
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline_), GST_DEBUG_GRAPH_SHOW_ALL,
                                          "playing-pipeline");
      } else if (oldstate == GST_STATE_PLAYING && newstate == GST_STATE_PAUSED) {
        printf("Pipeline finished from play to pause\n");
      } else if (oldstate == GST_STATE_PAUSED && newstate == GST_STATE_READY) {
      } else if (oldstate == GST_STATE_READY && newstate == GST_STATE_NULL) {
      }
      break;
    }

    default:
      break;
  }

  return TRUE;
}
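// A seek is due when the reported position is within kSeekEndDeltaSecs of
// (or past) the computed end time, or when playback has visibly stalled.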
bool MediaSourcePipeline::ShouldPerformSeek() {
  float seek_delta_secs =
      std::fabs(playback_position_secs_ - current_end_time_secs_);

  if (seek_delta_secs < kSeekEndDeltaSecs || playback_position_secs_ >= current_end_time_secs_)
    return true;

  if (is_playing_ && IsPlaybackStalled())
    return true;

  return false;
}
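// Playback is over when no timestamp files exist for the next file counter,
// i.e. there is no further segment to seek into.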
bool MediaSourcePipeline::IsPlaybackOver() {
  std::ostringstream counter_stream;
  counter_stream << (current_file_counter_ + 1);

  std::string audio_timestamp_path =
      frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
  std::string video_timestamp_path =
      frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";

  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");

  if (audio_timestamp_file == NULL && video_timestamp_file == NULL) {
    return true;
  }

  if (audio_timestamp_file)
    fclose(audio_timestamp_file);
  if (video_timestamp_file)
    fclose(video_timestamp_file);

  return false;
}
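// Walks the position-history ring backwards and reports whether positions
// were strictly increasing; the loop-boundary details are a best-effort
// reconstruction.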
bool MediaSourcePipeline::HasPlaybackAdvanced() {
  int32_t pos = current_playback_history_cnt_ - 1;
  if (pos < 0)
    pos = kPlaybackPositionHistorySize - 1;

  int32_t counter = 1;  // assumption: compare the N-1 adjacent history pairs
  int64_t val = playback_position_history_[pos];
  bool did_advance = true;
  while (counter < kPlaybackPositionHistorySize) {
    pos--;
    if (pos < 0)
      pos = kPlaybackPositionHistorySize - 1;

    if (val <= playback_position_history_[pos] ||
        playback_position_history_[pos] == -1) {
      did_advance = false;
      break;
    }

    val = playback_position_history_[pos];
    counter++;
  }
  return did_advance;
}
void MediaSourcePipeline::AddPlaybackPositionToHistory(int64_t position) {
  GstState state;
  gst_element_get_state(pipeline_, &state, 0, 0);

  // Only record positions while actually playing.
  if (state == GST_STATE_PLAYING) {
    playback_position_history_[current_playback_history_cnt_] = position;
    current_playback_history_cnt_ =
        (current_playback_history_cnt_ + 1) % kPlaybackPositionHistorySize;
  }
}
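// The pipeline is considered stalled when every history slot holds the same
// non-negative position, i.e. a full window elapsed with no movement.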
bool MediaSourcePipeline::IsPlaybackStalled() {
  int64_t first_pos = playback_position_history_[0];

  for (size_t i = 1; i < playback_position_history_.size(); i++) {
    if (playback_position_history_[i] != first_pos ||
        playback_position_history_[i] < 0)
      return false;
  }

  return true;
}
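// Once playbin has created the msesrc, register the audio/video appsrc
// elements with it exactly once and move the pipeline to PLAYING.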
void MediaSourcePipeline::finishPipelineLinkingAndStartPlaybackIfNeeded()
{
  if (source_ && !gst_mse_src_configured(source_)) {
    if (pipeline_type_ != kAudioOnly)
      gst_mse_src_register_player(source_, (GstElement*) appsrc_source_video_);
    if (pipeline_type_ != kVideoOnly)
      gst_mse_src_register_player(source_, (GstElement*) appsrc_source_audio_);

    gst_mse_src_configuration_done(source_);

    printf("Finished linking pipeline and putting it in play!\n");
    gst_element_set_state(pipeline_, GST_STATE_PLAYING);
  }
}
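// Periodic status poll driven by StatusPollStatic: queries the playback
// position, updates the stall-detection history, and kicks off a seek to the
// next segment when the current one is exhausted. The member name and the
// glue around the elided lines are assumptions.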
void MediaSourcePipeline::StatusPoll()
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 position = -1;

  finishPipelineLinkingAndStartPlaybackIfNeeded();

  if (pipeline_type_ != kAudioOnly)
    gst_element_query_position(video_sink_, fmt, &position);
  else  // assumption: audio-only playback falls back to querying the pipeline
    gst_element_query_position(pipeline_, fmt, &position);

  position += (seek_offset_ * 1000);
  if (position != static_cast<gint64>(GST_CLOCK_TIME_NONE)) {
    AddPlaybackPositionToHistory(position);
    if (!playback_started_)
      playback_started_ = HasPlaybackAdvanced();
    playback_position_secs_ = (static_cast<double>(position) / GST_SECOND);

    // Print the position only once per kPlaybackPositionUpdateIntervalMs.
    static int64_t position_update_cnt = 0;
    if (position_update_cnt == 0)
      printf("playback position: %f secs\n", playback_position_secs_);
    position_update_cnt = (position_update_cnt + kStatusDelayMs) %
                          kPlaybackPositionUpdateIntervalMs;
  }

  printf("playback started:%d\n", playback_started_);

  if (ShouldPerformSeek()) {
    if (IsPlaybackOver()) {
      printf("Current end time:%f\n", current_end_time_secs_);
      printf("Playback Complete! Starting over...\n");
      current_file_counter_ = -1;  // PerformSeek() pre-increments back to segment 0
    }

    printf("Performing Seek!\n");
    PerformSeek();  // assumption: the poll triggers the seek it just announced
  }
}
gboolean MediaSourcePipeline::ReadVideoFrame()
{
  if (!ShouldBeReading(kVideo)) {  // assumption: guard around the handle reset
    video_frame_timeout_handle_ = 0;
    return FALSE;
  }

  AVFrame video_frame;
  ReadStatus read_status = GetNextFrame(&video_frame, kVideo);

  printf("Video frame read status:%d\n", read_status);

  if (read_status != kFrameRead) {
    video_frame_timeout_handle_ = 0;
    return FALSE;
  }

  float frame_time_seconds = video_frame.timestamp_us_ / 1000000.0f;
  printf("read video frame: time:%f secs, size:%d bytes\n",
         frame_time_seconds, video_frame.size_);

  PushFrameToAppSrc(video_frame, kVideo);
  return TRUE;
}
// Audio counterpart of ReadVideoFrame() (same assumptions).
gboolean MediaSourcePipeline::ReadAudioFrame()
{
  if (!ShouldBeReading(kAudio)) {
    audio_frame_timeout_handle_ = 0;
    return FALSE;
  }

  AVFrame audio_frame;
  ReadStatus read_status = GetNextFrame(&audio_frame, kAudio);

  printf("Audio frame read status:%d\n", read_status);

  if (read_status != kFrameRead) {
    audio_frame_timeout_handle_ = 0;
    return FALSE;
  }

  float frame_time_seconds = audio_frame.timestamp_us_ / 1000000.0f;
  printf("read audio frame: time:%f secs, size:%d bytes\n",
         frame_time_seconds, audio_frame.size_);

  PushFrameToAppSrc(audio_frame, kAudio);
  return TRUE;
}
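// appsrc "need-data" handler: (re)arms the per-stream read timer for
// whichever appsrc is asking, unless it is already being fed.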
void MediaSourcePipeline::StartFeed(GstAppSrc* p_src)  // member name assumed
{
  bool start_up_reading_again = false;

  start_up_reading_again =
      !ShouldBeReading(p_src == appsrc_source_video_ ? kVideo : kAudio);
  if (start_up_reading_again)
    SetShouldBeReading(true, p_src == appsrc_source_video_ ? kVideo : kAudio);

  if (start_up_reading_again) {
    if (p_src == appsrc_source_video_) {
      video_frame_timeout_handle_ =
          g_timeout_add(kVideoReadDelayMs,
                        reinterpret_cast<GSourceFunc>(readVideoFrameStatic),
                        this);
    } else {
      audio_frame_timeout_handle_ =
          g_timeout_add(kAudioReadDelayMs,
                        reinterpret_cast<GSourceFunc>(readAudioFrameStatic),
                        this);
    }
  }
}
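// appsrc "enough-data" handler: stops the per-stream read timer so the
// feeder no longer pushes frames.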
void MediaSourcePipeline::StopFeed(GstAppSrc* p_src)  // member name assumed
{
  if (p_src == appsrc_source_video_) {
    if (video_frame_timeout_handle_) {
      g_source_remove(video_frame_timeout_handle_);
      video_frame_timeout_handle_ = 0;
    }
    SetShouldBeReading(false, kVideo);
  } else {
    if (audio_frame_timeout_handle_) {
      g_source_remove(audio_frame_timeout_handle_);
      audio_frame_timeout_handle_ = 0;
    }
    SetShouldBeReading(false, kAudio);
  }
}
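// decodebin pad-added handler: links the element that grew a pad to its
// successor in the manually tracked audio/video element chains.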
void MediaSourcePipeline::OnAutoPadAdded(GstElement* element, GstPad* pad)  // member name assumed
{
  GstCaps* caps;
  GstStructure* structure;
  const gchar* name;
  std::vector<GstElement*>::iterator src_it;
  std::vector<GstElement*>::iterator dest_it;

  caps = gst_pad_query_caps(pad, NULL);
  structure = gst_caps_get_structure(caps, 0);
  name = gst_structure_get_name(structure);

  if (g_strrstr(name, "video")) {
    src_it = std::find(
        ms_video_pipeline_.begin(), ms_video_pipeline_.end(), element);
    dest_it = src_it + 1;
    if (src_it != ms_video_pipeline_.end() &&
        dest_it != ms_video_pipeline_.end()) {
      if (gst_element_link(*src_it, *dest_it) == FALSE) {
        g_print("Couldn't link auto pad added elements in video pipeline\n");
      }
    }
  } else if (g_strrstr(name, "audio")) {
    src_it = std::find(
        ms_audio_pipeline_.begin(), ms_audio_pipeline_.end(), element);
    dest_it = src_it + 1;
    if (src_it != ms_audio_pipeline_.end() &&
        dest_it != ms_audio_pipeline_.end()) {
      if (gst_element_link(*src_it, *dest_it) == FALSE) {
        g_print("Couldn't link auto pad added elements in audio pipeline\n");
      }
    }
  }

  gst_caps_unref(caps);
}
// GstBin "element-added" handler: remembers the automatically created audio
// sink (member name assumed).
void MediaSourcePipeline::OnAutoElementAdded(GstBin* bin, GstElement* element)
{
  if (g_strrstr(GST_ELEMENT_NAME(element), "audio") &&
      g_strrstr(GST_ELEMENT_NAME(element), "sink")) {
    audio_sink_ = element;
  }
}
MediaSourcePipeline::MediaSourcePipeline(std::string frame_files_path)
    : frame_files_path_(frame_files_path)
{
  Init();  // assumption: construction resets all members via Init()
}

MediaSourcePipeline::~MediaSourcePipeline() { Destroy(); }
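// Resets every member to its idle default; used before (re)building the
// pipeline.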
void MediaSourcePipeline::Init()
{
  current_file_counter_ = 0;
  current_video_file_ = NULL;
  current_video_timestamp_file_ = NULL;
  current_audio_file_ = NULL;
  current_audio_timestamp_file_ = NULL;

  appsrc_source_video_ = NULL;
  appsrc_source_audio_ = NULL;

  playback_position_secs_ = 0;
  current_end_time_secs_ = 0;
  video_frame_timeout_handle_ = 0;
  audio_frame_timeout_handle_ = 0;
  status_timeout_handle_ = 0;
  current_playback_history_cnt_ = 0;
  playback_started_ = false;

  pipeline_type_ = kAudioVideo;

  appsrc_caps_video_ = NULL;
  appsrc_caps_audio_ = NULL;
  pause_before_seek_ = false;

  memset(&should_be_reading_, 0, sizeof(should_be_reading_));

  playback_position_history_.resize(kPlaybackPositionHistorySize, 0);
  ResetPlaybackHistory();
}
bool MediaSourcePipeline::ShouldBeReading(AVType av) {
  return should_be_reading_[av];
}

void MediaSourcePipeline::SetShouldBeReading(bool is_reading, AVType av) {
  should_be_reading_[av] = is_reading;
}
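// Wraps a raw frame in a GstBuffer (timestamp rebased by the current seek
// offset), pairs it with the stream's caps, and pushes it into the
// appropriate appsrc.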
bool MediaSourcePipeline::PushFrameToAppSrc(const AVFrame& frame, AVType type) {
  GstFlowReturn ret = GST_FLOW_OK;

  GstBuffer* gst_buffer = gst_buffer_new_wrapped(frame.data_, frame.size_);
  GstSample* sample = NULL;
  GST_BUFFER_TIMESTAMP(gst_buffer) = (frame.timestamp_us_ - seek_offset_) * 1000;

  if (type == kVideo) {
    sample = gst_sample_new(gst_buffer, appsrc_caps_video_, NULL, NULL);
    ret = gst_app_src_push_sample(GST_APP_SRC(appsrc_source_video_), sample);
  } else {
    sample = gst_sample_new(gst_buffer, appsrc_caps_audio_, NULL, NULL);
    ret = gst_app_src_push_sample(GST_APP_SRC(appsrc_source_audio_), sample);
  }

  gst_buffer_unref(gst_buffer);
  gst_sample_unref(sample);

  if (ret != GST_FLOW_OK) {
    fprintf(stderr, "APPSRC PUSH FAILED!\n");
    return false;
  }

  return true;
}
int64_t MediaSourcePipeline::GetCurrentStartTimeMicroseconds() const {
  std::ostringstream counter_stream;
  counter_stream << current_file_counter_;

  std::string audio_timestamp_path =
      frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
  std::string video_timestamp_path =
      frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";

  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");

  int64_t smallest_time_ms = -1;

  int64_t timestamp_us;
  int frame_size;

  // Timestamp-file format assumed: "<timestamp_us> <size>" per frame line.
  if (audio_timestamp_file) {
    if (fscanf(audio_timestamp_file, "%" SCNd64 " %d",
               &timestamp_us, &frame_size) == 2) {
      smallest_time_ms = timestamp_us;
    }
    fclose(audio_timestamp_file);
  }

  if (video_timestamp_file) {
    if (fscanf(video_timestamp_file, "%" SCNd64 " %d",
               &timestamp_us, &frame_size) == 2) {
      if (smallest_time_ms == -1)
        smallest_time_ms = timestamp_us;
      else
        smallest_time_ms = std::min(smallest_time_ms, timestamp_us);
    }
    fclose(video_timestamp_file);
  }

  return smallest_time_ms;
}
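// Scans the current segment's timestamp files for the greatest audio and
// video timestamps, derives the segment end time (the smaller of the two
// when both streams exist), and classifies the pipeline as audio-only,
// video-only, or audio+video.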
void MediaSourcePipeline::CalculateCurrentEndTime() {
  std::ostringstream counter_stream;
  counter_stream << current_file_counter_;
  current_end_time_secs_ = 0;
  bool have_audio = false;
  bool have_video = false;

  std::string audio_timestamp_path =
      frame_files_path_ + "/raw_audio_frames_" + counter_stream.str() + ".txt";
  std::string video_timestamp_path =
      frame_files_path_ + "/raw_video_frames_" + counter_stream.str() + ".txt";

  FILE* audio_timestamp_file = fopen(audio_timestamp_path.c_str(), "r");
  FILE* video_timestamp_file = fopen(video_timestamp_path.c_str(), "r");

  float greatest_time_secs = 0;
  float greatest_video_time_secs = 0;
  float greatest_audio_time_secs = 0;

  int64_t timestamp_us;
  int frame_size;

  if (audio_timestamp_file) {
    // Timestamp-file format assumed: "<timestamp_us> <size>" per frame line.
    while (fscanf(audio_timestamp_file, "%" SCNd64 " %d",
                  &timestamp_us, &frame_size) == 2) {
      float timestamp_secs = timestamp_us / 1000000.0f;
      if (greatest_audio_time_secs < timestamp_secs)
        greatest_audio_time_secs = timestamp_secs;
    }
    fclose(audio_timestamp_file);
    have_audio = true;
  }

  if (video_timestamp_file) {
    while (fscanf(video_timestamp_file, "%" SCNd64 " %d",
                  &timestamp_us, &frame_size) == 2) {
      float timestamp_secs = timestamp_us / 1000000.0f;
      if (greatest_video_time_secs < timestamp_secs)
        greatest_video_time_secs = timestamp_secs;
    }
    fclose(video_timestamp_file);
    have_video = true;
  }

  if (greatest_audio_time_secs > 0 && greatest_video_time_secs > 0)
    greatest_time_secs =
        std::min(greatest_audio_time_secs, greatest_video_time_secs);
  else if (greatest_audio_time_secs > 0)
    greatest_time_secs = greatest_audio_time_secs;
  else if (greatest_video_time_secs > 0)
    greatest_time_secs = greatest_video_time_secs;

  printf("calculated end time, counter:%d, end time:%f\n",
         current_file_counter_,
         greatest_time_secs);
  printf("greatest audio time:%f, greatest video time:%f\n",
         greatest_audio_time_secs,
         greatest_video_time_secs);

  if (have_audio && have_video)
    pipeline_type_ = kAudioVideo;
  else if (have_audio)
    pipeline_type_ = kAudioOnly;
  else
    pipeline_type_ = kVideoOnly;

  if (greatest_time_secs > 0)
    current_end_time_secs_ = greatest_time_secs;
}
void MediaSourcePipeline::ResetPlaybackHistory() {
  for (int i = 0; i < kPlaybackPositionHistorySize; i++)
    playback_position_history_[i] = -1;
}
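// Advances to the next segment: recomputes the end time, flushes the msesrc
// with flush-start/flush-stop, and schedules the chunk-demuxer seek after
// kChunkDemuxerSeekDelayMs. The pause/resume handling around the flush is a
// best-effort assumption.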
void MediaSourcePipeline::PerformSeek() {
  playback_started_ = false;
  bool did_pause = false;
  ResetPlaybackHistory();

  current_file_counter_++;
  CalculateCurrentEndTime();

  if (pause_before_seek_) {
    is_playing_ = false;  // assumption: drop to PAUSED before flushing
    DoPause();
    did_pause = true;
  }

  gboolean seek_succeeded = FALSE;

  int64_t seek_time_us = GetCurrentStartTimeMicroseconds();
  seek_offset_ = seek_time_us;
  GstClockTime seek_time_ns =
      seek_time_us * 1000;  // assumption: microseconds converted to nanoseconds

  seek_succeeded = gst_element_send_event(source_, gst_event_new_flush_start());
  if (!seek_succeeded)
    printf("failed to send flush-start event\n");

  seek_succeeded = gst_element_send_event(source_, gst_event_new_flush_stop(TRUE));
  if (!seek_succeeded)
    printf("failed to send flush-stop event\n");

  if (!seek_succeeded) {
    printf("Failed to seek!\n");
    return;  // assumption: abort without scheduling the demuxer seek
  }

  g_timeout_add(kChunkDemuxerSeekDelayMs,
                reinterpret_cast<GSourceFunc>(ChunkDemuxerSeekStatic),
                this);

  if (pause_before_seek_) {
    if (did_pause) {
      is_playing_ = true;  // assumption: restore the pre-seek play state
      DoPause();
    }
  }
}
void MediaSourcePipeline::DoPause() {
  g_print("Setting state to %s\n", is_playing_ ? "PLAYING" : "PAUSED");
  gst_element_set_state(pipeline_, is_playing_ ? GST_STATE_PLAYING : GST_STATE_PAUSED);
}
void MediaSourcePipeline::CloseAllFiles() {
  if (current_video_file_)
    fclose(current_video_file_);
  if (current_video_timestamp_file_)
    fclose(current_video_timestamp_file_);
  if (current_audio_file_)
    fclose(current_audio_file_);
  if (current_audio_timestamp_file_)
    fclose(current_audio_timestamp_file_);

  current_video_file_ = current_video_timestamp_file_ = current_audio_file_ =
      current_audio_timestamp_file_ = NULL;
}
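// Reads the next frame for the requested stream: lazily opens the segment's
// .bin/.txt file pair, parses the timestamp entry, then reads the payload
// bytes. The caller owns frame->data_.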
ReadStatus MediaSourcePipeline::GetNextFrame(AVFrame* frame, AVType type) {
  FILE* current_file = NULL;
  FILE* current_timestamp_file = NULL;
  std::ostringstream counter_stream;
  counter_stream << current_file_counter_;

  if (type == kAudio) {
    if (current_audio_file_ == NULL) {
      std::string audio_path = frame_files_path_ + "/raw_audio_frames_" +
                               counter_stream.str() + ".bin";
      std::string audio_timestamp_path = frame_files_path_ +
                                         "/raw_audio_frames_" +
                                         counter_stream.str() + ".txt";
      current_audio_file_ = fopen(audio_path.c_str(), "rb");
      current_audio_timestamp_file_ = fopen(audio_timestamp_path.c_str(), "r");
    }

    current_file = current_audio_file_;
    current_timestamp_file = current_audio_timestamp_file_;
  } else {
    if (current_video_file_ == NULL) {
      std::string video_path = frame_files_path_ + "/raw_video_frames_" +
                               counter_stream.str() + ".bin";
      std::string video_timestamp_path = frame_files_path_ +
                                         "/raw_video_frames_" +
                                         counter_stream.str() + ".txt";
      current_video_file_ = fopen(video_path.c_str(), "rb");
      current_video_timestamp_file_ = fopen(video_timestamp_path.c_str(), "r");
    }

    current_file = current_video_file_;
    current_timestamp_file = current_video_timestamp_file_;
  }

  // kReadError below is an assumed enumerator name for the failure status.
  if (current_file == NULL || current_timestamp_file == NULL)
    return kReadError;

  // Timestamp-file format assumed: "<timestamp_us> <size>" per frame line.
  int ret = fscanf(current_timestamp_file, "%" SCNd64 " %d",
                   &frame->timestamp_us_,
                   &frame->size_);
  if (ret != 2)
    return kReadError;

  frame->data_ = static_cast<guint8*>(g_malloc(frame->size_));
  ret = fread(frame->data_, 1, frame->size_, current_file);
  if (ret != frame->size_) {
    g_free(frame->data_);
    return kReadError;
  }

  return kFrameRead;
}
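// Assembles the pipeline: two appsrc feeders with fixed H.264/AAC caps, a
// registered msesrc handling the mse:// URI inside playbin, a westerossink
// video sink, and a bus watch for messages.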
bool MediaSourcePipeline::Build()
{
  appsrc_caps_video_ = NULL;
  appsrc_caps_audio_ = NULL;
  appsrc_source_video_ = (GstAppSrc*) gst_element_factory_make("appsrc", NULL);
  appsrc_source_audio_ = (GstAppSrc*) gst_element_factory_make("appsrc", NULL);

  g_signal_connect(
      appsrc_source_video_, "need-data", G_CALLBACK(StartFeedStatic), this);
  g_signal_connect(
      appsrc_source_video_, "enough-data", G_CALLBACK(StopFeedStatic), this);
  g_signal_connect(
      appsrc_source_audio_, "need-data", G_CALLBACK(StartFeedStatic), this);
  g_signal_connect(
      appsrc_source_audio_, "enough-data", G_CALLBACK(StopFeedStatic), this);

  // Mark both appsrcs seekable so "seek-data" requests are issued
  // ("stream-type" is the property carrying GST_APP_STREAM_TYPE_SEEKABLE).
  g_object_set(G_OBJECT(appsrc_source_video_),
               "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,
               NULL);
  g_object_set(G_OBJECT(appsrc_source_audio_),
               "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,
               NULL);

  g_signal_connect(
      appsrc_source_video_, "seek-data", G_CALLBACK(SeekDataStatic), this);
  g_signal_connect(
      appsrc_source_audio_, "seek-data", G_CALLBACK(SeekDataStatic), this);

  gchar* caps_string_video = g_strdup_printf(
      "video/x-h264, stream-format=(string)avc, alignment=(string)au, level=(string)3.1, profile=(string)main, codec_data=(buffer)014d401fffe1001b674d401fe8802802dd80b5010101400000fa40003a9803c60c448001000468ebaf20, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)100000/3357");
  gchar* caps_string_audio = g_strdup_printf(
      "audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)1210, rate=(int)44100, channels=(int)2");

  appsrc_caps_video_ = gst_caps_from_string(caps_string_video);
  appsrc_caps_audio_ = gst_caps_from_string(caps_string_audio);
  g_free(caps_string_video);
  g_free(caps_string_audio);

  // Register msesrc for the mse:// URI scheme unless it is already known.
  GstElementFactory* src_factory = gst_element_factory_find("msesrc");
  if (!src_factory)
    gst_element_register(0, "msesrc", GST_RANK_PRIMARY + 100, GST_MSE_TYPE_SRC);
  else
    gst_object_unref(src_factory);

  pipeline_ = gst_element_factory_make("playbin", NULL);
  g_signal_connect(pipeline_, "source-setup", G_CALLBACK(sourceChangedCallback), this);

  video_sink_ = gst_element_factory_make("westerossink", "vsink");

  g_object_set(G_OBJECT(pipeline_), "video-sink", video_sink_, NULL);

  if (g_object_class_find_property(G_OBJECT_GET_CLASS(video_sink_), "secure-video"))
  {
    g_object_set(G_OBJECT(video_sink_), "secure-video", TRUE, NULL);
  }

  unsigned flagAudio = getGstPlayFlag("audio");
  unsigned flagVideo = getGstPlayFlag("video");
  unsigned flagNativeVideo = getGstPlayFlag("native-video");
  unsigned flagBuffering = getGstPlayFlag("buffering");

  g_object_set(pipeline_, "uri", "mse://",
               "flags", flagAudio | flagVideo | flagNativeVideo | flagBuffering, NULL);

  GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
  gst_bus_add_signal_watch(bus);
  g_signal_connect(bus, "message", G_CALLBACK(MessageCallbackStatic), this);
  gst_object_unref(bus);

  return true;
}
void MediaSourcePipeline::StopAllTimeouts()
{
  if (status_timeout_handle_) {
    g_source_remove(status_timeout_handle_);
    status_timeout_handle_ = 0;
  }
}
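// Tears the pipeline down: waits for the NULL state, then drops every
// GStreamer reference and resets the cached pointers.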
void MediaSourcePipeline::Destroy() {
  StopAllTimeouts();  // assumption: timers and files are cleaned up first
  CloseAllFiles();

  GstState state, pending;
  gst_element_set_state(pipeline_, GST_STATE_NULL);
  gst_element_get_state(pipeline_, &state, &pending, GST_CLOCK_TIME_NONE);
  while (state != GST_STATE_NULL) {
    gst_element_get_state(pipeline_, &state, &pending, GST_CLOCK_TIME_NONE);
  }

  gst_object_unref(GST_OBJECT(pipeline_));

  if (source_)
    gst_object_unref(source_);
  if (appsrc_caps_video_)
    gst_caps_unref(appsrc_caps_video_);
  if (appsrc_caps_audio_)
    gst_caps_unref(appsrc_caps_audio_);

  appsrc_source_video_ = NULL;
  appsrc_source_audio_ = NULL;

  appsrc_caps_video_ = NULL;
  appsrc_caps_audio_ = NULL;

  printf("Pipeline Destroyed\n");
}
bool MediaSourcePipeline::Start() {
  CalculateCurrentEndTime();

  if (!Build()) {
    fprintf(stderr, "Failed to build gstreamer pipeline\n");
    return false;
  }

  printf("Current end time:%f secs\n", current_end_time_secs_);

  printf("Pausing pipeline!\n");
  gst_element_set_state(pipeline_, GST_STATE_PAUSED);

  status_timeout_handle_ = g_timeout_add(
      kStatusDelayMs, reinterpret_cast<GSourceFunc>(StatusPollStatic), this);

  return true;
}
void MediaSourcePipeline::HandleKeyboardInput(unsigned int key) {
  // Assumption: a play/pause toggle key (KEY_* codes come from
  // <linux/input.h>); other keys are ignored.
  switch (key) {
    case KEY_SPACE:
      is_playing_ = !is_playing_;
      DoPause();
      break;
    default:
      break;
  }
}
rtError MediaSourcePipeline::suspend()
{
  printf("MediaSourcePipeline is going to suspend\n");
  return RT_OK;  // assumption: RT_OK signals success to the rt framework
}

rtError MediaSourcePipeline::resume()
{
  printf("MediaSourcePipeline is going to resume\n");
  return RT_OK;  // assumption
}