A recent task was to use GStreamer 1.0 to grab RGB image data from the framebuffer device fb0, encode it to H.264, and then wrap the result into an MP4 video. The hardware platform is the i.MX6QDL, which integrates IPU and VPU hardware blocks; the Linux kernel is from Yocto Project, version 4.1.15.
IPU: converts the RGB data to YUV, because the VPU only accepts YUV input for H.264 encoding.
VPU: encodes the YUV data into an H.264 stream.
GStreamer concepts are not covered in detail here; refer to the development manual and the examples on the official site. One particularly useful tool is gst-inspect-1.0, which lists all available GStreamer elements; running gst-inspect-1.0 followed by an element name shows that element's details. The gst-launch-1.0 tool is also handy for quick tests, for example:
gst-launch-1.0 ximagesrc remote=1 use-damage=0 ! video/x-raw,framerate=25/1 ! videoconvert ! vp8enc !
matroskamux ! filesink location=/home/luke/test.mkv
which records the screen on Ubuntu 16.04.
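On the i.MX6 target, gst-inspect-1.0 can likewise be used to confirm that the NXP-specific elements are present before writing any code, for example:
gst-inspect-1.0 | grep imx      # list the i.MX plugin elements
gst-inspect-1.0 vpuenc_h264     # show the VPU H.264 encoder's properties (bitrate, gop-size, ...)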
For this application, six GStreamer 1.0 elements are needed: appsrc, imxvideoconvert_ipu, vpuenc_h264, appsink, filesink and mp4mux. The first step converts the RGB frames to an H.264 stream, which is then remuxed into MP4 on a PC with the ffmpeg tool for testing, using: ffmpeg -f h264 -i test_15x30.h264 -vcodec copy test_15x30.mp4.
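Before walking through the C code, the target pipeline can be sketched with gst-launch-1.0 (a rough sketch only: videotestsrc stands in here for the framebuffer frames that appsrc will feed, and the resolution, framerate and gop-size values are placeholders):
gst-launch-1.0 videotestsrc num-buffers=500 ! \
video/x-raw,format=BGRx,width=1024,height=768,framerate=15/1 ! \
imxvideoconvert_ipu ! vpuenc_h264 gop-size=30 ! \
video/x-h264,stream-format=byte-stream,alignment=au ! \
filesink location=/home/root/test.h264
The full code is as follows: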
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <signal.h>
typedef struct _GstDataStruct{
GstElement *pipeline;
GstElement *app_source;
GstElement *video_convert;
GstElement *auto_video_convert;
GstElement *h264_encoder;
GstElement *jpgenc;
GstElement *mp4mux;
GstElement *file_sink;
GstElement *app_sink;
guint sourceid; /* To control the GSource */
guint app_src_index;
guint app_sink_index;
GMainLoop *loop; /* GLib's Main Loop */
} GstDataStruct;
typedef struct
{
struct fb_var_screeninfo vinfo; // variable screen information
int width; // visible width in pixels
int height; // visible height in pixels
int bpp; // bits per pixel
int rowsize; // bytes per screen row
int real_len; // bytes of the visible display area (one frame)
int total_len; // total bytes of the virtual display area
int offset; // byte offset of the visible area within the total
int fd; // file descriptor of the opened fb node
void* data; // pointer to the mmap'ed fb0 memory
}FbInfoStruct;
static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData);
static void eos_on_appsink (GstElement *sink, GstDataStruct *pGstData);
static gboolean read_data(GstDataStruct *pGstData);
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData);
static void stop_feed (GstElement * pipeline, GstDataStruct *pGstData);
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData);
static FILE *fp; // file pointer for the output file
static GstBus *bus;
static GstDataStruct GstData;
static FbInfoStruct FbInfo;
int main(int argc, char *argv[])
{
guint bus_watch_id;
char file_name[32];
int frame_rate = 10;
int gop_size = 30;
if(argc == 3)
{
frame_rate = atoi(argv[1]);
gop_size = atoi(argv[2]);
}
snprintf(file_name, sizeof(file_name), "/home/root/test_%02dx%02d.h264", frame_rate, gop_size); // e.g. /home/root/test_10x30.h264
printf("file name is :%s\n", file_name);
printf("frame_rate:%d gop-size:%d\n", frame_rate, gop_size);
fp = fopen(file_name, "wb");
if(!fp)
{
printf("can not open file %s", file_name);
return -1;
}
printf("================ imx60 360 main start ==============\n");
memset (&GstData, 0, sizeof (GstDataStruct));
memset (&FbInfo, 0, sizeof (FbInfoStruct));
printf("=========== imx60 360 get fb0 info start ===========\n");
FbInfo.fd = open("/dev/fb0", O_RDWR);
ioctl(FbInfo.fd, FBIOGET_VSCREENINFO, &FbInfo.vinfo);
FbInfo.width = FbInfo.vinfo.xres;
FbInfo.height = FbInfo.vinfo.yres;
FbInfo.bpp = FbInfo.vinfo.bits_per_pixel;
FbInfo.rowsize = FbInfo.width * (FbInfo.bpp >> 3);
FbInfo.offset = FbInfo.rowsize * FbInfo.vinfo.yoffset;
FbInfo.total_len = FbInfo.vinfo.xres_virtual * FbInfo.vinfo.yres_virtual * (FbInfo.bpp >> 3);
FbInfo.real_len = FbInfo.width * FbInfo.height * (FbInfo.bpp >> 3);
FbInfo.data = mmap (NULL, FbInfo.total_len, PROT_READ | PROT_WRITE, MAP_SHARED, FbInfo.fd, 0);
printf("================= var screen info =================\n");
printf(" sz [%d x %d] %d\n", FbInfo.width, FbInfo.height, FbInfo.bpp);
printf(" vsz [%d x %d]\n", FbInfo.vinfo.xres_virtual, FbInfo.vinfo.yres_virtual);
printf(" pan : (%d, %d)\n", FbInfo.vinfo.xoffset, FbInfo.vinfo.yoffset);
printf(" off : %d\n", FbInfo.offset);
printf(" map : %p\n", FbInfo.data);
printf("============ imx60 360 get fb0 info end ===========\n");
/* Initialize custom data structure */
printf("============= imx60 360 gst init start ============\n");
gst_init (&argc, &argv);
/* Create gstreamer elements */
printf("=========== create imx60 360 pipeline =============\n");
GstData.pipeline = gst_pipeline_new ("imx60_360");
GstData.app_source = gst_element_factory_make ("appsrc", "video-source");
GstData.video_convert = gst_element_factory_make ("imxvideoconvert_ipu", "video_convert");
GstData.h264_encoder = gst_element_factory_make ("vpuenc_h264", "video-encoder");
GstData.app_sink = gst_element_factory_make ("appsink", "fake");
if (!GstData.pipeline || !GstData.app_source || !GstData.h264_encoder ||
!GstData.video_convert || !GstData.app_sink)
{
g_printerr ("One element could not be created... Exit\n");
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
fclose(fp);
return -1;
}
printf("============ link imx60 360 pipeline ==============\n");
guint block_size = FbInfo.width * FbInfo.height * (FbInfo.bpp >> 3);
g_object_set(G_OBJECT(GstData.app_source), "blocksize", block_size, NULL); // "blocksize" is a guint property, so pass an integer, not a string
g_object_set(G_OBJECT(GstData.app_source), "do-timestamp", TRUE, NULL);
g_object_set(G_OBJECT(GstData.app_source), "stream-type", 0, "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(GstData.app_source), "min-percent", 3, NULL);
GstCaps *appsrc_caps;
appsrc_caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"BGRx",
"width", G_TYPE_INT, FbInfo.width,
"height", G_TYPE_INT, FbInfo.height,
"framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);
GstCaps *caps_app_sink;
caps_app_sink = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream",
"alignment", G_TYPE_STRING, "au", NULL);
g_object_set(G_OBJECT(GstData.app_source), "caps", appsrc_caps, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "bitrate", 1000, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "quant", 51, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "gop-size", gop_size, NULL);
g_object_set(G_OBJECT(GstData.app_sink), "emit-signals", TRUE, "caps", caps_app_sink, "sync", FALSE, NULL);
bus = gst_pipeline_get_bus(GST_PIPELINE(GstData.pipeline));
bus_watch_id = gst_bus_add_watch(bus, (GstBusFunc)bus_msg_call, (gpointer)&GstData);
gst_object_unref(bus);
g_signal_connect(GstData.app_source, "need-data", G_CALLBACK(start_feed), &GstData);
g_signal_connect(GstData.app_source, "enough-data", G_CALLBACK(stop_feed), &GstData);
g_signal_connect(GstData.app_sink, "new-sample", G_CALLBACK(new_h264_sample_on_appsink), &GstData);
g_signal_connect(GstData.app_sink, "eos", G_CALLBACK(eos_on_appsink), &GstData);
gst_bin_add_many(GST_BIN(GstData.pipeline), GstData.app_source, GstData.video_convert,
GstData.h264_encoder, GstData.app_sink, NULL);
if(gst_element_link_filtered(GstData.app_source, GstData.video_convert, appsrc_caps) != TRUE)
{
g_printerr ("GstData.app_source could not link GstData.video_convert\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
fclose(fp);
return -1;
}
gst_caps_unref (appsrc_caps);
if(gst_element_link(GstData.video_convert, GstData.h264_encoder) != TRUE)
{
g_printerr ("GstData.video_convert could not link GstData.h264_encoder\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
fclose(fp);
return -1;
}
if(gst_element_link_filtered(GstData.h264_encoder, GstData.app_sink, caps_app_sink) != TRUE)
{
g_printerr ("GstData.h264_encoder could not link GstData.file_sink\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
fclose(fp);
return -1;
}
gst_caps_unref (caps_app_sink);
GstData.app_src_index = 0;
GstData.app_sink_index = 0;
gst_element_set_state (GstData.pipeline, GST_STATE_PLAYING);
GstData.loop = g_main_loop_new(NULL, FALSE); // Create gstreamer loop
g_main_loop_run(GstData.loop); // Loop will run until receiving EOS (end-of-stream), will block here
fprintf(stderr, "g_main_loop_run returned, stopping record\n");
// Free resources
gst_element_set_state (GstData.pipeline, GST_STATE_NULL); // Stop pipeline to be released
fprintf(stderr, "Deleting pipeline\n");
gst_object_unref (GstData.pipeline); // This will also delete all pipeline elements
g_source_remove(bus_watch_id);
g_main_loop_unref(GstData.loop);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
fclose(fp);
return 0;
}
static void eos_on_appsink (GstElement *sink, GstDataStruct *pGstData)
{
printf("appsink get signal eos !!!\n");
}
static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData)
{
GstSample *sample = NULL;
g_signal_emit_by_name (sink, "pull-sample", &sample);
if(sample)
{
pGstData->app_sink_index++;
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo info;
if(gst_buffer_map((buffer), &info, GST_MAP_READ))
{
g_print ("h264 Streaming Buffer is Comming\n");
// Here to get h264 buffer data with info.data and get h264 buffer size with info.size
//gst_util_dump_mem (info.data, info.size);
fwrite(info.data, info.size, 1, fp);
g_print ("h264 Streaming Buffer is wrote, len:%d, index:%d\n", (int)info.size, pGstData->app_sink_index);
gst_buffer_unmap(buffer, &info);
gst_sample_unref (sample);
}
}
}
/* This method is called by the idle GSource in the mainloop. We feed one
 * framebuffer-sized buffer into appsrc per call.
 * The idle handler is added to the mainloop when appsrc requests us to start
 * sending data (need-data signal) and is removed when appsrc has enough data
 * (enough-data signal).
 */
static gboolean read_data (GstDataStruct *pGstData)
{
GstFlowReturn ret;
GstBuffer *buffer;
GstMemory *memory;
pGstData->app_src_index++;
if (pGstData->app_src_index > 500)
{
g_signal_emit_by_name (pGstData->app_source, "end-of-stream", &ret);
//ret = gst_app_src_end_of_stream(GST_APP_SRC(pGstData->app_source));
g_debug("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
buffer = gst_buffer_new();
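// Wrap the mmap'ed framebuffer without copying: offset skips to the panned visible area, real_len covers exactly one frame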
memory = gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, FbInfo.data, FbInfo.total_len, FbInfo.offset, FbInfo.real_len, NULL, NULL);
gst_buffer_append_memory (buffer, memory);
g_signal_emit_by_name (pGstData->app_source, "push-buffer", buffer, &ret);
//GST_DEBUG ("feed buffer %p, offset %" G_GUINT64_FORMAT "-%u", buffer,FbInfo.offset, FbInfo.real_len);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK)
{
g_debug("push buffer returned %d for %d bytes \n", ret, FbInfo.real_len);
return FALSE;
}
return TRUE;
}
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData)
{
g_print("start feed...................\n");
if (pGstData->sourceid == 0) {
//GST_DEBUG ("start feeding");
pGstData->sourceid = g_idle_add ((GSourceFunc) read_data, pGstData);
}
}
static void stop_feed (GstElement * pipeline, GstDataStruct *pGstData)
{
g_print("stop feed...................\n");
if (pGstData->sourceid != 0) {
//GST_DEBUG ("stop feeding");
g_source_remove (pGstData->sourceid);
pGstData->sourceid = 0;
}
}
// Bus messages processing, similar to all gstreamer examples
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData)
{
gchar *debug;
GError *error;
GMainLoop *loop = pGstData->loop;
GST_DEBUG ("got message %s",gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
fprintf(stderr, "End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
default:
break;
}
return TRUE;
}
The code above uses the callback attached to the appsink element, static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData), to write the encoded H.264 frames out to an .h264 file.
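A build-and-run sketch for the program above (the source file name fb_h264.c and the cross-compiler variable $CC are assumptions; the pkg-config package names are the standard GStreamer ones, with appsrc/appsink living in gstreamer-app-1.0):
$CC fb_h264.c -o fb_h264 $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0)
./fb_h264 15 30    # 15 fps, GOP size 30; then remux on the PC with the ffmpeg command shown earlier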
Once correct H.264 frames are being produced, the appsink is swapped for the mp4mux and filesink elements so the pipeline writes an MP4 video file directly. The code is as follows:
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <signal.h>
typedef struct _GstDataStruct{
GstElement *pipeline;
GstElement *app_source;
GstElement *video_convert;
GstElement *auto_video_convert;
GstElement *h264_encoder;
GstElement *jpgenc;
GstElement *mp4mux;
GstElement *file_sink;
GstElement *app_sink;
guint sourceid; /* To control the GSource */
guint app_src_index;
guint app_sink_index;
GMainLoop *loop; /* GLib's Main Loop */
} GstDataStruct;
typedef struct
{
struct fb_var_screeninfo vinfo; // variable screen information
int width; // visible width in pixels
int height; // visible height in pixels
int bpp; // bits per pixel
int rowsize; // bytes per screen row
int real_len; // bytes of the visible display area (one frame)
int total_len; // total bytes of the virtual display area
int offset; // byte offset of the visible area within the total
int fd; // file descriptor of the opened fb node
void* data; // pointer to the mmap'ed fb0 memory
}FbInfoStruct;
static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData);
static void eos_on_appsink (GstElement *sink, GstDataStruct *pGstData);
static gboolean read_data(GstDataStruct *pGstData);
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData);
static void stop_feed (GstElement * pipeline, GstDataStruct *pGstData);
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData);
static FILE *fp; // file pointer (unused in this MP4 version, where filesink writes the file)
static GstBus *bus;
static GstDataStruct GstData;
static FbInfoStruct FbInfo;
int main(int argc, char *argv[])
{
guint bus_watch_id;
char file_name[32];
int frame_rate = 15;
int gop_size = 30;
if(argc == 3)
{
frame_rate = atoi(argv[1]);
gop_size = atoi(argv[2]);
printf("frame_rate:%d gop-size:%d\n", frame_rate, gop_size);
snprintf(file_name, sizeof(file_name), "/home/root/test_%02dx%02d.mp4", frame_rate, gop_size); // e.g. /home/root/test_15x30.mp4
printf("file name is :%s\n", file_name);
}
else
{
printf("Usage : %s frame_rate gop-size\n", argv[0]);
return 1;
}
printf("================ imx60 360 main start ==============\n");
memset (&GstData, 0, sizeof (GstDataStruct));
memset (&FbInfo, 0, sizeof (FbInfoStruct));
printf("========== imx60 360 get fb0 info start ==========\n");
FbInfo.fd = open("/dev/fb0", O_RDWR);
ioctl(FbInfo.fd, FBIOGET_VSCREENINFO, &FbInfo.vinfo);
FbInfo.width = FbInfo.vinfo.xres;
FbInfo.height = FbInfo.vinfo.yres;
FbInfo.bpp = FbInfo.vinfo.bits_per_pixel;
FbInfo.rowsize = FbInfo.width * (FbInfo.bpp >> 3);
FbInfo.offset = FbInfo.rowsize * FbInfo.vinfo.yoffset;
FbInfo.total_len = FbInfo.vinfo.xres_virtual * FbInfo.vinfo.yres_virtual * (FbInfo.bpp >> 3);
FbInfo.real_len = FbInfo.width * FbInfo.height * (FbInfo.bpp >> 3);
FbInfo.data = mmap (NULL, FbInfo.total_len, PROT_READ | PROT_WRITE, MAP_SHARED, FbInfo.fd, 0);
printf("================= var screen info =================\n");
printf(" sz [%d x %d] %d\n", FbInfo.width, FbInfo.height, FbInfo.bpp);
printf(" vsz [%d x %d]\n", FbInfo.vinfo.xres_virtual, FbInfo.vinfo.yres_virtual);
printf(" pan : (%d, %d)\n", FbInfo.vinfo.xoffset, FbInfo.vinfo.yoffset);
printf(" off : %d\n", FbInfo.offset);
printf(" map : %p\n", FbInfo.data);
printf("============ imx60 360 get fb0 info end ===========\n");
/* Initialize custom data structure */
printf("============= imx60 360 gst init start ============\n");
gst_init (&argc, &argv);
/* Create gstreamer elements */
printf("=========== create imx60 360 pipeline =============\n");
GstData.pipeline = gst_pipeline_new ("imx60_360");
GstData.app_source = gst_element_factory_make ("appsrc", "video-source");
GstData.video_convert = gst_element_factory_make ("imxvideoconvert_ipu", "video_convert");
GstData.auto_video_convert = gst_element_factory_make ("autovideoconvert", "auto-video_convert"); // created but not linked in this version
GstData.h264_encoder = gst_element_factory_make ("vpuenc_h264", "video-encoder");
GstData.file_sink = gst_element_factory_make ("filesink", "file-sink");
GstData.app_sink = gst_element_factory_make ("appsink", "fake"); // created but not used in this MP4 version
GstData.mp4mux = gst_element_factory_make ("mp4mux", "mp4_mux");
if (!GstData.pipeline || !GstData.app_source || !GstData.h264_encoder ||
!GstData.video_convert || !GstData.file_sink || !GstData.mp4mux)
{
g_printerr ("One element could not be created... Exit\n");
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return -1;
}
printf("============ link imx60 360 pipeline ==============\n");
guint block_size = FbInfo.width * FbInfo.height * (FbInfo.bpp >> 3);
g_object_set(G_OBJECT(GstData.app_source), "blocksize", block_size, NULL); // "blocksize" is a guint property, so pass an integer, not a string
g_object_set(G_OBJECT(GstData.app_source), "do-timestamp", TRUE, NULL);
g_object_set(G_OBJECT(GstData.app_source), "stream-type", 0, "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(GstData.app_source), "min-percent", 3, NULL);
GstCaps *appsrc_caps;
appsrc_caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"BGRx",
"width", G_TYPE_INT, FbInfo.width,
"height", G_TYPE_INT, FbInfo.height,
"framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);
g_object_set(G_OBJECT(GstData.app_source), "caps", appsrc_caps, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "bitrate", 500, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "gop-size", gop_size, NULL);
g_object_set(G_OBJECT(GstData.file_sink), "location", file_name, NULL);
bus = gst_pipeline_get_bus(GST_PIPELINE(GstData.pipeline));
bus_watch_id = gst_bus_add_watch(bus, (GstBusFunc)bus_msg_call, (gpointer)&GstData);
gst_object_unref(bus);
g_signal_connect(GstData.app_source, "need-data", G_CALLBACK(start_feed), &GstData);
g_signal_connect(GstData.app_source, "enough-data", G_CALLBACK(stop_feed), &GstData);
gst_bin_add_many(GST_BIN(GstData.pipeline), GstData.app_source, GstData.video_convert,
GstData.h264_encoder, GstData.mp4mux, GstData.file_sink, NULL);
GstCaps *caps_to_convert_sink;
caps_to_convert_sink = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGRx",
"width", G_TYPE_INT, FbInfo.width,
"height", G_TYPE_INT, FbInfo.height,
"framerate", GST_TYPE_FRACTION, frame_rate, 1, NULL);
if(gst_element_link_filtered(GstData.app_source, GstData.video_convert, caps_to_convert_sink) != TRUE)
{
g_printerr ("GstData.app_source could not link GstData.video_convert\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return -1;
}
gst_caps_unref (caps_to_convert_sink);
if(gst_element_link(GstData.video_convert, GstData.h264_encoder) != TRUE)
{
g_printerr ("GstData.video_convert could not link GstData.h264_encoder\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return -1;
}
GstCaps *caps_to_mp4mux_sink;
caps_to_mp4mux_sink = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc",
"alignment", G_TYPE_STRING, "au",
"width", G_TYPE_INT, FbInfo.width,
"height", G_TYPE_INT, FbInfo.height, NULL);
if(gst_element_link_filtered(GstData.h264_encoder, GstData.mp4mux, caps_to_mp4mux_sink) != TRUE)
{
g_printerr ("GstData.h264_encoder could not link GstData.mp4mux\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return -1;
}
gst_caps_unref (caps_to_mp4mux_sink);
if(gst_element_link(GstData.mp4mux, GstData.file_sink) != TRUE)
{
g_printerr ("GstData.mp4mux could not link GstData.file_sink\n");
gst_object_unref (GstData.pipeline);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return -1;
}
GstData.app_src_index = 0;
gst_element_set_state (GstData.pipeline, GST_STATE_PLAYING);
GstData.loop = g_main_loop_new(NULL, FALSE); // Create gstreamer loop
g_main_loop_run(GstData.loop); // Loop will run until receiving EOS (end-of-stream), will block here
fprintf(stderr, "g_main_loop_run returned, stopping record\n");
// Free resources
gst_element_set_state (GstData.pipeline, GST_STATE_NULL); // Stop pipeline to be released
fprintf(stderr, "Deleting pipeline\n");
gst_object_unref (GstData.pipeline); // This will also delete all pipeline elements
g_source_remove(bus_watch_id);
g_main_loop_unref(GstData.loop);
munmap(FbInfo.data, FbInfo.total_len);
close(FbInfo.fd);
return 0;
}
static void eos_on_appsink (GstElement *sink, GstDataStruct *pGstData)
{
printf("appsink get signal eos !!!\n");
}
/* This method is called by the idle GSource in the mainloop. We feed one
 * framebuffer-sized buffer into appsrc per call.
 * The idle handler is added to the mainloop when appsrc requests us to start
 * sending data (need-data signal) and is removed when appsrc has enough data
 * (enough-data signal).
 */
static gboolean read_data (GstDataStruct *pGstData)
{
GstFlowReturn ret;
GstBuffer *buffer;
GstMemory *memory;
pGstData->app_src_index++;
if (pGstData->app_src_index > 500)
{
g_signal_emit_by_name (pGstData->app_source, "end-of-stream", &ret);
//ret = gst_app_src_end_of_stream(GST_APP_SRC(pGstData->app_source));
g_debug("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
buffer = gst_buffer_new();
memory = gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, FbInfo.data, FbInfo.total_len, FbInfo.offset, FbInfo.real_len, NULL, NULL);
gst_buffer_append_memory (buffer, memory);
g_signal_emit_by_name (pGstData->app_source, "push-buffer", buffer, &ret);
//GST_DEBUG ("feed buffer %p, offset %" G_GUINT64_FORMAT "-%u", buffer,FbInfo.offset, FbInfo.real_len);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK)
{
g_debug("push buffer returned %d for %d bytes \n", ret, FbInfo.real_len);
return FALSE;
}
return TRUE;
}
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData)
{
g_print("start feed...................\n");
if (pGstData->sourceid == 0) {
//GST_DEBUG ("start feeding");
pGstData->sourceid = g_idle_add ((GSourceFunc) read_data, pGstData);
}
}
static void stop_feed (GstElement * pipeline, GstDataStruct *pGstData)
{
g_print("stop feed...................\n");
if (pGstData->sourceid != 0) {
//GST_DEBUG ("stop feeding");
g_source_remove (pGstData->sourceid);
pGstData->sourceid = 0;
}
}
// Bus messages processing, similar to all gstreamer examples
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData)
{
gchar *debug;
GError *error;
GMainLoop *loop = pGstData->loop;
GST_DEBUG ("got message %s",gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
fprintf(stderr, "End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
default:
break;
}
return TRUE;
}
The code above has been tested and runs correctly. Note the 500-frame limit built into it: after 500 frames the program emits the EOS (end-of-stream) signal itself and exits. While recording, CPU usage stays between 53% and 58%, which confirms that the IPU and VPU hardware blocks inside the i.MX6QDL are actually being used, through the imxvideoconvert_ipu and vpuenc_h264 elements respectively.
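To sanity-check the recording on the PC, the MP4 can be inspected or played back with the ffmpeg tools already mentioned above, for example:
ffprobe test_15x30.mp4    # should report one h264 video stream in an mp4 container
ffplay test_15x30.mp4     # play the recording back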
That is all for this part; comments and pointers from more experienced readers are welcome.