1. 程式人生 > >gstreamer播放教程一:playbin——獲取媒體的流資訊、切換流。

gstreamer播放教程一:playbin——獲取媒體的流資訊、切換流。

以下程式:獲取一個媒體檔案中流的數目以及流的資訊,並且可以切換音軌。

可以理解為:使用playbin播放媒體,然後從playbin中獲取以上資訊

#include "pch.h"
#include<string.h>
#include<stdio.h>
#include <gst/gst.h>

/* Application state shared between main() and the GLib callbacks.
 * Bundling everything in one struct makes it easy to hand to the
 * callbacks as their user-data pointer. */
typedef struct _CustomData{
	GstElement *playbin;   /* the playbin pipeline element */

	gint n_video;          /* number of embedded video streams */
	gint n_audio;          /* number of embedded audio streams */
	gint n_text;           /* number of embedded subtitle streams */

	gint current_video;    /* index of the currently selected video stream */
	gint current_audio;    /* index of the currently selected audio stream */
	gint current_text;     /* index of the currently selected subtitle stream */

	GMainLoop *main_loop;  /* GLib main loop driving the bus and stdin watches */
}CustomData;

/* playbin "flags" property bits.  The real GstPlayFlags enum is not
 * exported in a public GStreamer header, so applications conventionally
 * redefine the bits they need (values per the playbin documentation). */
typedef enum {
	GST_PLAY_FLAG_VIDEO = (1 << 0), /* enable video rendering */
	GST_PLAY_FLAG_AUDIO = (1 << 1), /* enable audio rendering */
	GST_PLAY_FLAG_TEXT  = (1 << 2), /* enable subtitle rendering */
	/* Misspelled names kept as aliases for backward compatibility with
	 * existing code that references them. */
	GST_PALY_FLAG_AUDIO = GST_PLAY_FLAG_AUDIO,
	GST_PALY_FLAG_TEXT  = GST_PLAY_FLAG_TEXT
}GstPlayFlags;

/* Forward declarations: bus-message callback and keyboard-input callback
 * (both installed as GLib watches from main()). */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data);

/* Build a playbin pipeline, watch its bus and stdin from a GLib main
 * loop, and play until EOS/error.  Returns 0 on success, -1 on setup
 * failure. */
int main(int argc, char *argv[]) {
	CustomData data;
	GstBus *bus;
	GstStateChangeReturn ret;
	gint flags;
	GIOChannel *io_stdin;

	gst_init(&argc, &argv);

	data.playbin = gst_element_factory_make("playbin", "playbin");
	if (!data.playbin) {
		g_printerr("could not create playbin\n");
		return -1;
	}

	/* Media to play; a local Windows path must be written as a file://
	 * URI with forward slashes. */
	g_object_set(data.playbin, "uri", "file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv", NULL);

	/* Enable video and audio rendering and disable subtitles.
	 * Read-modify-write so the remaining flag bits keep their defaults. */
	g_object_get(data.playbin, "flags", &flags, NULL);
	flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
	flags &= ~GST_PALY_FLAG_TEXT;
	g_object_set(data.playbin, "flags", flags, NULL);

	/* "connection-speed" tells playbin the maximum network bandwidth so
	 * it can pick a suitable alternate when the server offers several
	 * versions of the media.  (uri, flags and connection-speed could
	 * also be set in a single g_object_set() call.) */
	g_object_set(data.playbin, "connection-speed", 56, NULL);

	bus = gst_element_get_bus(data.playbin);
	gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);

	/* Hook standard input (the keyboard) to a callback.  This uses
	 * GLib's I/O-channel mechanism, not GStreamer, so keyboard input is
	 * dispatched from the same main loop as bus messages. */
#ifdef G_OS_WIN32
	io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
	io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
	g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

	ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr("could not set state to playing\n");
		/* Release everything acquired so far (the original leaked the
		 * bus and the I/O channel on this path). */
		gst_object_unref(bus);
		g_io_channel_unref(io_stdin);
		gst_object_unref(data.playbin);
		return -1;
	}

	/* Instead of polling the bus by hand, run a GLib main loop; it only
	 * returns once g_main_loop_quit() is called from a callback. */
	data.main_loop = g_main_loop_new(NULL, FALSE);
	g_main_loop_run(data.main_loop);

	/* Free resources. */
	g_main_loop_unref(data.main_loop);
	g_io_channel_unref(io_stdin);
	gst_object_unref(bus);
	gst_element_set_state(data.playbin, GST_STATE_NULL);
	gst_object_unref(data.playbin);
	return 0;
}

/* Query playbin for the number of streams of each kind, print each
 * stream's metadata (tags), and report which streams are currently
 * selected.  Called once the pipeline reaches PLAYING. */
static void analyze_streams(CustomData *data) {
	gint i;
	GstTagList *tags;
	gchar *str;
	guint rate;

	/* Number of embedded video/audio/subtitle streams. */
	g_object_get(data->playbin, "n-video", &data->n_video, NULL);
	g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
	g_object_get(data->playbin, "n-text", &data->n_text, NULL);

	g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
		data->n_video, data->n_audio, data->n_text);

	g_print("\n");
	for (i = 0; i < data->n_video; i++) {
		tags = NULL;

		/* Per-stream metadata comes back as a GstTagList through
		 * playbin's action signals (get-video-tags, get-audio-tags and
		 * get-text-tags), emitted with g_signal_emit_by_name().
		 * Individual tags are then read with the gst_tag_list_get_*
		 * family; here we look at GST_TAG_*_CODEC and
		 * GST_TAG_LANGUAGE_CODE. */
		g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);

		if (tags) {
			g_print("video stream %d:\n", i);
			/* Check the return value: when the tag is absent str is
			 * left unset, so printing/freeing it would be undefined
			 * behavior (the original read it uninitialized). */
			if (gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str)) {
				g_print("codec:%s\n", str);
				g_free(str);
			} else {
				g_print("codec:%s\n", "unknown");
			}
			gst_tag_list_free(tags);
		}
	}

	g_print("\n");
	for (i = 0; i < data->n_audio; i++) {
		tags = NULL;
		g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
		if (tags) {
			g_print("audio stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
				g_print("codec:%s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print("  language: %s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
				g_print("  bitrate: %d\n", rate);
			}
			gst_tag_list_free(tags);
		}
	}
	g_print("\n");
	for (i = 0; i < data->n_text; i++) {
		tags = NULL;
		/* Retrieve the stream's subtitle tags */
		g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
		if (tags) {
			g_print("subtitle stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print("  language: %s\n", str);
				g_free(str);
			}
			gst_tag_list_free(tags);
		}
	}

	/* Indices of the streams currently being played. */
	g_object_get(data->playbin, "current-video", &data->current_video, NULL);
	g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
	g_object_get(data->playbin, "current-text", &data->current_text, NULL);

	g_print("\n");
	g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
		data->current_video, data->current_audio, data->current_text);
	g_print("Type any number and hit ENTER to select a different audio stream\n");
}

/* Bus watch callback: invoked by the GLib main loop for every message
 * playbin posts on the bus (the bus and data pointers are the ones
 * passed to gst_bus_add_watch()).  Returning TRUE keeps the watch
 * installed. */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
	GError *err;
	gchar *debug_info;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug_info);
		g_printerr("Error received from element %s:%s\n", GST_OBJECT_NAME(msg->src), err->message);
		g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
		g_clear_error(&err);
		g_free(debug_info);
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_EOS:
		g_print("end of stream reached\n");
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_STATE_CHANGED: {
		GstState old_state, new_state, pending_state;
		gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
		/* Child elements also post state-changed messages; only react to
		 * playbin's own transition to PLAYING, which marks the point
		 * where the stream information is available to analyze. */
		if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
			if (new_state == GST_STATE_PLAYING) {
				analyze_streams(data);
			}
		}
		break;
	}
	default:
		/* Other message types are not interesting here. */
		break;
	}
	return TRUE;
}


/* Keyboard watch callback: reads one line from stdin and, if it is a
 * valid audio-stream index, switches playbin's current audio stream.
 * Returning TRUE keeps the watch installed. */
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data) {
	gchar *str = NULL;

	if (g_io_channel_read_line(source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
		/* Parse with the signed converter so negative input is rejected
		 * by the bounds check directly, instead of relying on
		 * implementation-defined truncation of an unsigned value into
		 * int as the original did. */
		gint64 index = g_ascii_strtoll(str, NULL, 0);
		if (index < 0 || index >= data->n_audio) {
			g_printerr("Index out of bounds\n");
		}
		else {
			/* If the input was a valid audio stream index, set the current audio stream */
			g_print("Setting current audio stream to %d\n", (gint)index);
			g_object_set(data->playbin, "current-audio", (gint)index, NULL);
		}
	}
	g_free(str);
	return TRUE;
}

程式碼分解:

1、定義了一個結構體,包括playbin元件,音軌、視訊、字幕的數目等,以及一個main_loop。把這些資訊作為一個結構體來定義,是為了方便訪問,比如作為回撥函式的引數來傳遞就很方便。

typedef struct _CustomData{
	GstElement *playbin;

	gint n_video;
	gint n_audio;
	gint n_text;

	gint current_video;
	gint current_audio;
	gint current_text;

	GMainLoop *main_loop;
}CustomData;

2、定義一個列舉型別,包括playbin的一些標誌(flag),1<<0 ,就代表1的二進位制數左移0位。之所以要在應用程式裡自行定義這個列舉,是因為GStreamer並沒有在公開標頭檔案中匯出GstPlayFlags,慣例上由應用程式自行定義所需的旗標位元(數值依playbin文件)。

typedef enum {
	GST_PLAY_FLAG_VIDEO = (1 << 0), //we want video output
	GST_PALY_FLAG_AUDIO = (1 << 1),
	GST_PALY_FLAG_TEXT = (1 << 2)
}GstPlayFlags;

3、main函式,首先初始化,然後建立playbin

    CustomData data;
	GstBus *bus;
	GstStateChangeReturn ret;
	gint flags;
	GIOChannel *io_stdin;

	gst_init(&argc, &argv);

	data.playbin = gst_element_factory_make("playbin", "playbin");
	if (!data.playbin) {
		g_printerr("could not create playbin\n");
		return -1;
	}

4、設定playbin中的一些屬性:uri 、flags、connection-speed。

uri就是媒體地址啦,可以是網路地址,也可以是本地地址。

flags:用來控制playbin要啟用哪些輸出(視訊、音訊、字幕、視覺化等),各旗標位元的含義如下:

GST_PLAY_FLAG_VIDEO 允許視訊渲染,如果這個標誌沒有設定,則沒有視訊輸出
GST_PLAY_FLAG_AUDIO 允許音訊渲染,如果這個標誌沒有設定,則沒有音訊輸出
GST_PLAY_FLAG_TEXT 允許字幕渲染,如果這個標誌沒有設定,則沒有字幕顯示
GST_PLAY_FLAG_VIS 允許在沒有視訊流時進行視覺化渲染,後面教程會講到
GST_PLAY_FLAG_DOWNLOAD 參見《GStreamer基礎教程12——流》以及後續教程
GST_PLAY_FLAG_BUFFERING 參見《GStreamer基礎教程12——流》以及後續教程
GST_PLAY_FLAG_DEINTERLACE 如果視訊是隔行掃描的,那麼在顯示時改成逐行掃描

connection-speed:設定網路的最大連線速度(單位kbps)。當伺服器對同一媒體提供多個版本(不同位元率)時,playbin會依據這個值挑選合適的版本。

        g_object_set(data.playbin, "uri", "file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv", NULL);

	g_object_get(data.playbin, "flags", &flags, NULL);
	flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
	flags &= ~GST_PALY_FLAG_TEXT;
	g_object_set(data.playbin, "flags", flags, NULL);

	//connection-speed設定網路的最大連線速度
	g_object_set(data.playbin, "connection-speed", 56, NULL);

當然也可以使用一個g_object_set函式來設定所有的屬性:

g_object_set(data.playbin, "uri", "file:///c:/filename", "flags", flags, "connection-speed", 56, NULL);

 5、監聽匯流排,設定回撥函式handle_message。

	bus = gst_element_get_bus(data.playbin);
	gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);

 6、看一下回調函式handle_message。

有三個引數:bus、msg、data。其中bus和data就是gst_bus_add_watch()中傳入的bus與&data;而msg則是各元件貼到匯流排上的每一條GstMessage——GLib主迴圈在訊息到達時把它從匯流排取出,傳給回撥函式。

使用switch判斷msg,如果是GST_MESSAGE_ERROR或者GST_MESSAGE_EOS就呼叫g_main_loop_quit()結束loop。

如果是GST_MESSAGE_STATE_CHANGED,也就是狀態改變到playing,相當於初次開啟媒體檔案,並且開始播放。就呼叫analyze_streams()來分析流資訊。

analyze_stream()接下來分析。g_main_loop_quit()接下來也會解釋。

static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
	GError *err;
	gchar *debug_info;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug_info);
		g_printerr("Error recived from element %s:%s\n", GST_OBJECT_NAME(msg->src), err->message);
		g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
		g_clear_error(&err);
		g_free(debug_info);
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_EOS:
		g_print("end of stream reched\n");
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_STATE_CHANGED: {
		GstState old_state, new_state, pending_state;
		gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
		if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
			if (new_state == GST_STATE_PLAYING) {
				analyze_streams(data);
			}
		}
		break;
	}
	}
	return TRUE;
}

7、這幾行連線了一個標準輸入(鍵盤)和一個回撥函式。這裡使用的機制是GLib的,並非是基於GStreamer的.

最後是設定了回撥函式handle_keyboard。

主要是這樣的:在播放過程中隨時等待使用者在命令列輸入內容。handle_keyboard的作用就是在獲取到輸入以後做出判斷並執行相應操作。

#ifdef G_OS_WIN32
	io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
	io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
	g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

8、將狀態設定為playing,並且設定了main_loop。使用g_main_loop_run的意思就是一直迴圈著,直到遇到g_main_loop_quit()才結束。

        ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr("could not set sate to playing\n");
		gst_object_unref(data.playbin);
		return -1;
	}

	//為了互動,不再手動輪詢gstreamer匯流排,我們建立main_loop,並且使用了g_main_loop_run函式讓它執行起來。
	//,直到呼叫g_main_loop_quit()才被返回
	data.main_loop = g_main_loop_new(NULL,false);
	g_main_loop_run(data.main_loop);

9、最後就是釋放資源

        g_main_loop_unref(data.main_loop);
	g_io_channel_unref(io_stdin);
	gst_object_unref(bus);
	gst_element_set_state(data.playbin, GST_STATE_NULL);
	g_object_unref(data.playbin);

10、analyze_stream()就是真正獲取流資訊的地方。

主要是用g_object_get()函式來獲取流數目,當前流。

用g_signal_emit_by_name()來獲取流的tags,然後用gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)來獲取tags中的語言等資訊。

static void analyze_streams(CustomData *data) {
	gint i;
	GstTagList *tags;
	gchar *str;
	guint rate;

	g_object_get(data->playbin, "n-video", &data->n_video, NULL);
	g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
	g_object_get(data->playbin, "n-text", &data->n_text, NULL);

	g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
		data->n_video, data->n_audio, data->n_text);

	g_print("\n");
	for (i = 0; i < data->n_video; i++) {
		tags = NULL;

		//現在,對於每一個流來說,我們需要獲得它的元資料。元資料是存在一個GstTagList的結構體裡面,
		//這個GstTagList通過g_signal_emit_by_name()可以把流裡面對應的tag都取出來。
		//然後可以用gst_tag_list_get_*這一類函式來訪問這些tag,這個例子中用的就是gst_tag_list_get_string()方法。

		//playbin定義了3個action訊號來獲得元資料:get-video-tags,get-audio-tags和get-text-tags。
		//在這個例子中我們關注的是GST_TAG_LANGUAGE_CODE這個tag和GST_TAG_ * _CODEC(audio,video和text)。
		g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);

		if (tags) {
			g_print("video stream %d:\n", i);
			gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str);
			g_print("codec:%s\n", str ? str : "unknown");
			g_free(str);
			gst_tag_list_free(tags);
		}
	}

	g_print("\n");
	for (i = 0; i < data->n_audio; i++) {
		tags = NULL;
		g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
		if (tags) {
			g_print("audio stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
				g_print("codec:%s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print("  language: %s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
				g_print("  bitrate: %d\n", rate);
			}
			gst_tag_list_free(tags);
		}
	}
	g_print("\n");
	for (i = 0; i < data->n_text; i++) {
		tags = NULL;
		/* Retrieve the stream's subtitle tags */
		g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
		if (tags) {
			g_print("subtitle stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print("  language: %s\n", str);
				g_free(str);
			}
			gst_tag_list_free(tags);
		}
	}

	g_object_get(data->playbin, "current-video", &data->current_video, NULL);
	g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
	g_object_get(data->playbin, "current-text", &data->current_text, NULL);

	g_print("\n");
	g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
		data->current_video, data->current_audio, data->current_text);
	g_print("Type any number and hit ENTER to select a different audio stream\n");
}