When playing a video with gst-launch-1.0, the frame rate (FPS) can be measured by adding --padprobe v:sink --timer and name=v to the sink element, for example:
gst-launch-1.0 -e --padprobe v:sink --timer filesrc location=video-h264-bl10-fhd-30p-5m-aac-lc-stereo-124k-48000x264.mp4 \
! qtdemux ! queue ! omxh264dec ! \
vspfilter ! video/x-raw,format=BGRA ! waylandsink position-x=0 position-y=0 \
out-width=800 out-height=480 qos=false max-lateness=-1 name=v
and output like the following is printed to the console:
FPS: 9 TIME 11:57:47
FPS: 8 TIME 11:57:48
FPS: 8 TIME 11:57:49
FPS: 8 TIME 11:57:50
FPS: 9 TIME 11:57:51
FPS: 8 TIME 11:57:52
FPS: 8 TIME 11:57:53
FPS: 8 TIME 11:57:54
FPS: 9 TIME 11:57:55
FPS: 8 TIME 11:57:56
FPS: 8 TIME 11:57:57
FPS: 9 TIME 11:57:58
FPS: 8 TIME 11:57:59
Execution ended after 0:00:16.017383800
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
Setting pipeline to NULL ...
Total time: 16.017389 seconds
Frames: 133 processed
Avg. FPS: 8.30
Freeing pipeline ...
How can I do something similar from GStreamer source code written in C, for example in the program below?
#include <gst/gst.h>
#define INPUT_FILE "/home/root/videos/vga1.h264"
#define POSITION_X 100
#define POSITION_Y 100
int
main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *parser, *decoder, *sink;
  GstBus *bus;
  GstMessage *msg;
  const gchar *input_file = INPUT_FILE;

  /* Initialization */
  gst_init (&argc, &argv);

  /* TODO: add the FPS-measuring pad probe here (the equivalent of
   * --padprobe v:sink --timer), once the sink element and its pad exist */

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("video-play");
  source = gst_element_factory_make ("filesrc", "file-source");
  parser = gst_element_factory_make ("h264parse", "h264-parser");
  decoder = gst_element_factory_make ("omxh264dec", "h264-decoder");
  sink = gst_element_factory_make ("waylandsink", "video-output");
  if (!pipeline || !source || !parser || !decoder || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set input video file for source element */
  g_object_set (G_OBJECT (source), "location", input_file, NULL);

  /* Set position for displaying (100, 100) */
  g_object_set (G_OBJECT (sink), "position-x", POSITION_X,
      "position-y", POSITION_Y, NULL);

  /* Add all elements into the pipeline */
  /* pipeline---[ file-source + h264-parser + h264-decoder + video-output ] */
  gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, sink, NULL);

  /* Link the elements together */
  /* file-source -> h264-parser -> h264-decoder -> video-output */
  if (gst_element_link_many (source, parser, decoder, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", input_file);
  if (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }
  g_print ("Running...\n");

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Note that because the timeout is GST_CLOCK_TIME_NONE,
     gst_bus_timed_pop_filtered() blocks until a matching message
     (GST_MESSAGE_ERROR or GST_MESSAGE_EOS) is posted on the bus. */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s.\n",
            GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s.\n",
            debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources and change state to NULL */
  gst_object_unref (bus);
  g_print ("Returned, stopping playback...\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Freeing pipeline...\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_print ("Completed. Goodbye!\n");
  return 0;
}
Could you explain how to implement the padprobe v:sink --timer behaviour in this source? My rough guess at what the probe part might look like is sketched below, but I have not been able to verify it. Thanks.
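The sketch below is based only on the generic gst_pad_add_probe() API (it is untested, and the callback name and the once-per-second bookkeeping are placeholders of my own); I do not know whether it matches what --padprobe v:sink --timer does internally:

/* Counts buffers arriving on the video sink's sink pad and prints an
 * approximate FPS figure once per second of wall-clock time. */
static guint64 frame_count = 0;
static gint64 interval_start = 0;

static GstPadProbeReturn
buffer_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  gint64 now = g_get_monotonic_time ();   /* microseconds */

  if (interval_start == 0)
    interval_start = now;
  frame_count++;

  if (now - interval_start >= G_USEC_PER_SEC) {
    g_print ("FPS: %" G_GUINT64_FORMAT "\n", frame_count);
    frame_count = 0;
    interval_start = now;
  }
  return GST_PAD_PROBE_OK;
}

/* ...and in main(), after the sink element has been created: */
GstPad *sinkpad = gst_element_get_static_pad (sink, "sink");
gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER,
    buffer_probe_cb, NULL, NULL);
gst_object_unref (sinkpad);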
Perhaps fpsdisplaysink can help. You can use it in place of the renderer and set the actual renderer on it as a property. It then calculates the fps and emits a signal for each new measurement. See: https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-bad/html/gst-plugins-bad-plugins-fpsdisplaysink.html
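For illustration, a minimal sketch of how that suggestion might be wired into the program above, assuming fpsdisplaysink (from gst-plugins-bad) is available on the target; the callback name on_fps_measurement is just a placeholder:

static void
on_fps_measurement (GstElement *fpsdisplaysink, gdouble fps,
    gdouble droprate, gdouble avgfps, gpointer user_data)
{
  g_print ("FPS: %.2f, drop rate: %.2f, average FPS: %.2f\n",
      fps, droprate, avgfps);
}

/* In main(), wrap the real video sink in an fpsdisplaysink: */
GstElement *fpssink;

fpssink = gst_element_factory_make ("fpsdisplaysink", "fps-sink");
g_object_set (G_OBJECT (fpssink),
    "video-sink", sink,              /* the waylandsink created earlier */
    "text-overlay", FALSE,           /* print to console instead of drawing on the video */
    "signal-fps-measurements", TRUE, /* emit the fps-measurements signal */
    NULL);
g_signal_connect (fpssink, "fps-measurements",
    G_CALLBACK (on_fps_measurement), NULL);

/* Then add and link fpssink in place of the plain sink, e.g.:
 * gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, fpssink, NULL);
 * gst_element_link_many (source, parser, decoder, fpssink, NULL);
 */

With text-overlay left at its default the numbers are drawn on the video instead; disabling it and connecting to the signal should give console output comparable to the gst-launch run above.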