/*
 * A simple media player for Linux. The player responds to keyboard input
 * and switches between programs (channels):
 *   N          - next program
 *   P          - previous program
 *   digit keys - select the corresponding program
 * When switching programs, the player sends a notification message to a
 * remote server.
 */
#include <gst/gst.h>windows
#include <gst/video/video.h> #include <gst/interfaces/xoverlay.h> #include <gdk/gdkx.h> #include <gtk/gtk.h> #include <gdk/gdk.h> #include <gdk/gdkwindow.h> #include <glib.h> #include <glib/gprintf.h> #include <stdlib.h> #include <string.h> #include <gdk/gdkkeysyms.h> #include <signal.h> #include <sys/time.h> #include <time.h> #include <string.h> #include <assert.h> #include <vector> #include <string> #include <list> //#include <map> #include <iostream> #include <sys/socket.h> #include <netinet/in.h> #include <string.h> #include <arpa/inet.h> #include <unistd.h> using std::string; using std::vector; using std::list; using std::swap; //using namespace gstchannel; typedef struct _CustomData { GstElement *pipeline; GstElement *video_sink; GMainLoop *loop; gboolean playing; /* Playing or Paused */ gdouble rate; /* Current playback rate (can be negative) */ } CustomData; #define CH_NUM 3 /* Default video sink */ #define DEFAULT_VIDEOSINK "autovideosink" gint channel_index = 1;// gulong embed_xid; GtkWidget *video_window; GtkWidget *main_window; GdkWindow *video_window_xwindow; GstElement *sink; gchar channel[CH_NUM][128]; gchar* uri_udp[CH_NUM] = {"udp://127.0.0.1:5050", "udp://0.0.0.0:5050", "udp://0.0.0.0:5051"};//127.0.0.1:5050 gchar* uri_file[CH_NUM] = {"file:///home/ttk/Videos/test1.ts", "file:///home/ttk/Videos/test2.ts", "file:///home/ttk/Videos/test3.ts"}; /* slightly convoluted way to find a working video sink that's not a bin, * one could use autovideosink from gst-plugins-good instead */ static GstElement * find_video_sink (void) { GstStateChangeReturn sret; GstElement *sink; if ((sink = gst_element_factory_make ("xvimagesink", NULL))) { sret = gst_element_set_state (sink, GST_STATE_READY); if (sret == GST_STATE_CHANGE_SUCCESS) return sink; gst_element_set_state (sink, GST_STATE_NULL); } gst_object_unref (sink); if ((sink = gst_element_factory_make ("ximagesink", NULL))) { sret = gst_element_set_state (sink, GST_STATE_READY); if (sret == 
GST_STATE_CHANGE_SUCCESS) return sink; gst_element_set_state (sink, GST_STATE_NULL); } gst_object_unref (sink); if (strcmp (DEFAULT_VIDEOSINK, "xvimagesink") == 0 || strcmp (DEFAULT_VIDEOSINK, "ximagesink") == 0) return NULL; if ((sink = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL))) { if (GST_IS_BIN (sink)) { gst_object_unref (sink); return NULL; } sret = gst_element_set_state (sink, GST_STATE_READY); if (sret == GST_STATE_CHANGE_SUCCESS) return sink; gst_element_set_state (sink, GST_STATE_NULL); } gst_object_unref (sink); return NULL; } /* This function is called when the GUI toolkit creates the physical window that will hold the video. * At this point we can retrieve its handler (which has a different meaning depending on the windowing system) * and pass it to GStreamer through the XOverlay interface. */ static void realize_cb (GtkWidget *widget, CustomData *data) { GdkWindow *window = gtk_widget_get_window (widget); guintptr window_handle; if (!gdk_window_ensure_native (window)) g_error ("Couldn't create native window needed for GstXOverlay!"); /* Retrieve window handler from GDK */ window_handle = GDK_WINDOW_XID (window); /* Pass it to playbin2, which implements XOverlay and will forward it to the video sink */ //gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), window_handle); gst_x_overlay_set_window_handle (GST_X_OVERLAY (sink), window_handle); } gint play_next_channel(gint next_channel, CustomData *data) { gint ret = -1; int sockfd_tv = -1; int sockfd_mmt = -1; int len_tv = 0; int len_mmt = 0; struct sockaddr_in server_addr_tv; struct sockaddr_in server_addr_mmt; char *server_addr[6] = {"0","1","2", "192.168.0.101:1100", "192.168.0.101:1200", "192.168.0.101:1300"}; if(0) { //state change //send signal to TIZEN TV for resize sockfd_tv = socket(AF_INET, SOCK_DGRAM, 0); server_addr_tv.sin_family = AF_INET; server_addr_tv.sin_port = htons(4040); server_addr_tv.sin_addr.s_addr = inet_addr("192.168.0.150"); len_tv = sendto(sockfd_tv, (char 
*)&(next_channel), 1, 0, (struct sockaddr*)&server_addr_tv, sizeof(server_addr_tv)); if(len_tv <= 0) { g_print("first send failed.\n"); //return -1; } g_print("change to channel: %d.\n", next_channel); close(sockfd_tv); //send signal to server for content change sockfd_mmt = socket(AF_INET, SOCK_STREAM, 0); server_addr_mmt.sin_family = AF_INET; server_addr_mmt.sin_port = htons(4096); server_addr_mmt.sin_addr.s_addr = inet_addr("192.168.0.160"); if (0 > connect(sockfd_mmt, (struct sockaddr *)&server_addr_mmt, sizeof(struct sockaddr))) { printf("connect failed.\n"); return -1; } printf("connect OK.\n"); //sleep(5); int len_mmt = write(sockfd_mmt, server_addr[next_channel], strlen(server_addr[next_channel])+1); if(len_mmt <= 0) { printf("first send failed.\n"); return -1; } printf("send msg: %s.\n", server_addr[next_channel]); close(sockfd_mmt); } if (next_channel < 3 || next_channel > 5) { if (!strcmp(channel[next_channel % CH_NUM], channel[channel_index % CH_NUM])) { channel_index = next_channel; return 0; } gst_element_set_state (data->pipeline, GST_STATE_NULL); g_object_set (data->pipeline, "uri", channel[next_channel % CH_NUM], NULL); sink = find_video_sink (); if (sink == NULL) g_error ("Couldn't find a working video sink."); g_object_set (GST_OBJECT (data->pipeline), "video-sink", sink, NULL); video_window_xwindow = gtk_widget_get_window (video_window); embed_xid = GDK_WINDOW_XID (video_window_xwindow); gst_x_overlay_set_window_handle (GST_X_OVERLAY (sink), embed_xid); gtk_window_set_title (GTK_WINDOW (main_window), channel[next_channel % CH_NUM]); ret = gst_element_set_state (data->pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data->pipeline); gtk_main_quit (); return -1; } g_print ("Change channel from %d to %d, uri: %s res: %d\n", channel_index, next_channel, channel[next_channel % CH_NUM], ret); //channel_index = next_channel; //gtk_window_set_title 
(GTK_WINDOW(main_window), (const gchar*) channel[channel_index % CH_NUM]); } channel_index = next_channel; return 0; } /* * handle user input */ void process_input (int key, CustomData *data) { gint next_channel = -1; gint ret = -1; switch (key) { case KEY_HANDLER_RIGHT: break; case KEY_HANDLER_LEFT: break; case KEY_HANDLER_UP: break; case KEY_HANDLER_DOWN: break; case KEY_HANDLER_PAGE_UP: break; case KEY_HANDLER_PAGE_DOWN: break; case 'Q': case 'q': gst_element_set_state (data->pipeline, GST_STATE_NULL); gtk_main_quit (); break; case 'N': case 'n': next_channel = (channel_index + 1);// % CH_NUM; ret = play_next_channel(next_channel, data); if (ret) return; gtk_widget_show_all (main_window); break; case 'P': case 'p': next_channel = channel_index - 1; if (next_channel < 0) next_channel = 0; play_next_channel(next_channel, data); gtk_widget_show_all (main_window); break; default: g_print ("Key value: %d\n", key); break; } } /* Process keyboard input */ static gboolean key_press_event_cb (GtkWidget *widget, GdkEventKey *event, gpointer data) { int ch = 0; CustomData* priv_data = (CustomData*)data; if (event->keyval > 0 && event->keyval <= 127) ch = event->keyval; //else // ch = key_map[event->keyval]; if (ch != 0) { process_input (ch, priv_data); return true; } } /* This creates all the GTK+ widgets that compose our application, and registers the callbacks */ static void create_ui (CustomData *data) { //GtkWidget *main_window; /* The uppermost window, containing all other windows */ //GtkWidget *video_window; /* The drawing area where the video will be shown */ GtkWidget *main_box; /* VBox to hold main_hbox and the controls */ GtkWidget *main_hbox; /* HBox to hold the video_window and the stream info text widget */ main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL); g_signal_connect (G_OBJECT (main_window), "key_press_event", G_CALLBACK (key_press_event_cb), data); video_window = gtk_drawing_area_new (); //gtk_widget_set_double_buffered (video_window, FALSE); 
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data); //g_signal_connect (video_window, "expose_event", G_CALLBACK (expose_cb), data); main_hbox = gtk_hbox_new (FALSE, 0); gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0); main_box = gtk_vbox_new (FALSE, 0); gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0); //gtk_box_pack_start (GTK_BOX (main_box), video_window, FALSE, TRUE, 0); gtk_container_add (GTK_CONTAINER (main_window), main_box); gtk_window_set_title (GTK_WINDOW(main_window), (const gchar*) channel[channel_index]); gtk_window_set_default_size (GTK_WINDOW (main_window), 1920, 1080); //gtk_window_set_type_hint (GTK_WINDOW(main_window),GDK_WINDOW_TYPE_HINT_DOCK); gtk_window_stick (GTK_WINDOW(main_window)); gtk_window_activate_focus (GTK_WINDOW(main_window)); gtk_widget_show_all (main_window); } gint main (gint argc, gchar *argv[]) { GstStateChangeReturn ret; CustomData data; int i = 0; if (argc > 1) { if (!strcmp(argv[1], "udp")) for (i = 0; i < CH_NUM; i++) strcpy(channel[i], uri_udp[i]); else for (i = 0; i < CH_NUM; i++) strcpy(channel[i], uri_file[i]); } else for (i = 0; i < CH_NUM; i++) strcpy(channel[i], uri_file[i]); /* init GStreamer && gtk */ gst_init (&argc, &argv); gtk_init(&argc, &argv); /* Initialize our data structure */ memset (&data, 0, sizeof (data)); //channel_index = 0; data.pipeline = gst_element_factory_make ("playbin2", "playbin2"); sink = find_video_sink (); if (sink == NULL) g_error ("Couldn't find a working video sink."); g_object_set (GST_OBJECT (data.pipeline), "video-sink", sink, NULL); /* Set the URI to play */ g_object_set (data.pipeline, "uri", channel[channel_index], NULL); /* prepare the ui */ main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL); g_signal_connect (G_OBJECT (main_window), "key_press_event", G_CALLBACK (key_press_event_cb), (gpointer)&data); gtk_window_set_default_size (GTK_WINDOW (main_window), 1920, 1080); gtk_window_set_title (GTK_WINDOW (main_window), 
channel[channel_index]); video_window = gtk_drawing_area_new (); gtk_widget_set_double_buffered (video_window, FALSE); gtk_container_add (GTK_CONTAINER (main_window), video_window); gtk_container_set_border_width (GTK_CONTAINER (main_window), 1); gtk_widget_show_all (main_window); gtk_widget_realize (main_window); video_window_xwindow = gtk_widget_get_window (video_window); embed_xid = GDK_WINDOW_XID (video_window_xwindow); gst_x_overlay_set_window_handle (GST_X_OVERLAY (sink), embed_xid); /* Start playing */ ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data.pipeline); return -1; } /* now run */ gtk_main(); /* Free resources */ gst_object_unref (gst_element_get_bus (data.pipeline)); gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0; }