/*
 * emotion_gstreamer.c — GStreamer 1.x playback backend for Emotion (EFL).
 */
1
#ifdef HAVE_CONFIG_H
2
# include "config.h"
3
#endif
4
#include "emotion_gstreamer.h"
5

6
int _emotion_gstreamer_log_domain = -1;
7
Eina_Bool debug_fps = EINA_FALSE;
8

9
static int _emotion_init_count = 0;
10

11
/* Callbacks to get the eos */
12
static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
13
static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
14

15
static GstElement * _create_pipeline (Emotion_Gstreamer *ev, Evas_Object *o, const char *uri, const char *suburi);
16

17
static GstBusSyncReply _bus_sync_handler(GstBus *bus,
18
                                         GstMessage *message,
19
                                         gpointer data);
20

21
static void em_audio_channel_volume_set(void *video, double vol);
22
static void em_audio_channel_mute_set(void *video, int mute);
23

24
/* Module interface */
25

26
static const char *
27
emotion_visualization_element_name_get(Emotion_Vis visualisation)
28
{
29
   switch (visualisation)
30
     {
31
      case EMOTION_VIS_NONE:
32
         return NULL;
33
      case EMOTION_VIS_GOOM:
34
         return "goom";
35
      case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
36
         return "libvisual_bumpscope";
37
      case EMOTION_VIS_LIBVISUAL_CORONA:
38
         return "libvisual_corona";
39
      case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
40
         return "libvisual_dancingparticles";
41
      case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
42
         return "libvisual_gdkpixbuf";
43
      case EMOTION_VIS_LIBVISUAL_G_FORCE:
44
         return "libvisual_G-Force";
45
      case EMOTION_VIS_LIBVISUAL_GOOM:
46
         return "libvisual_goom";
47
      case EMOTION_VIS_LIBVISUAL_INFINITE:
48
         return "libvisual_infinite";
49
      case EMOTION_VIS_LIBVISUAL_JAKDAW:
50
         return "libvisual_jakdaw";
51
      case EMOTION_VIS_LIBVISUAL_JESS:
52
         return "libvisual_jess";
53
      case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
54
         return "libvisual_lv_analyzer";
55
      case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
56
         return "libvisual_lv_flower";
57
      case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
58
         return "libvisual_lv_gltest";
59
      case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
60
         return "libvisual_lv_scope";
61
      case EMOTION_VIS_LIBVISUAL_MADSPIN:
62
         return "libvisual_madspin";
63
      case EMOTION_VIS_LIBVISUAL_NEBULUS:
64
         return "libvisual_nebulus";
65
      case EMOTION_VIS_LIBVISUAL_OINKSIE:
66
         return "libvisual_oinksie";
67
      case EMOTION_VIS_LIBVISUAL_PLASMA:
68
         return "libvisual_plazma";
69
      default:
70
         return "goom";
71
     }
72
}
73

74
/* Take a reference on ev.  Atomic, so safe from any thread. */
Emotion_Gstreamer *
emotion_gstreamer_ref(Emotion_Gstreamer *ev)
{
  g_atomic_int_inc (&ev->ref_count);
  return ev;
}

/* Drop a reference on ev.  When the count reaches zero the subtitle
 * stringshare (if any) and the struct itself are freed. */
void
emotion_gstreamer_unref(Emotion_Gstreamer *ev)
{
  if (g_atomic_int_dec_and_test(&ev->ref_count))
    {
       if (ev->subtitle)
         {
            eina_stringshare_del(ev->subtitle);
            ev->subtitle = NULL;
         }
       free(ev);
    }
}
94

95
static Eina_Bool
96
em_file_open(void *video,
97
             const char *file)
98
{
99
   Emotion_Gstreamer *ev = video;
100
   char *uri;
101
   char *suburi = NULL;
102
   gboolean mute = 0;
103
   gdouble vol = 0.0;
104

105
   if (!file) return EINA_FALSE;
106

107
   if (gst_uri_is_valid(file)) uri = strdup(file);
108
   else uri = gst_filename_to_uri(file, NULL);
109
   if (!uri) return EINA_FALSE;
110

111
   ev->shutdown = EINA_FALSE;
112
   ev->ready = EINA_FALSE;
113
   ev->live = EINA_FALSE;
114
   ev->buffering = EINA_FALSE;
115

116
   DBG("setting file to '%s'", uri);
117

118
   if (ev->subtitle)
119
     {
120
        if (gst_uri_is_valid(ev->subtitle)) suburi = strdup(ev->subtitle);
121
        else suburi = gst_filename_to_uri(ev->subtitle, NULL);
122
     }
123
   ev->pipeline = _create_pipeline(ev, ev->obj, uri, suburi);
124
   g_free(uri);
125

126
   if (!ev->pipeline)
127
     return EINA_FALSE;
128

129
   g_object_get(ev->pipeline, "volume", &vol, NULL);
130
   g_object_get(ev->pipeline, "mute", &mute, NULL);
131
   ev->volume = vol;
132
   ev->audio_mute = mute;
133

134
   ev->position = 0.0;
135

136
   return EINA_TRUE;
137
}
138

139
static void
140
em_file_close(void *video)
141
{
142
   Emotion_Gstreamer *ev = video;
143
   Eina_List *l;
144

145
   ev->shutdown = EINA_TRUE;
146

147
   if (ev->threads)
148
     {
149
        Ecore_Thread *t;
150

151
        EINA_LIST_FOREACH(ev->threads, l, t)
152
          {
153
             ecore_thread_cancel(t);
154
          }
155
     }
156

157
   if (ev->pipeline)
158
     {
159
       if (ev->audio_buffer_probe)
160
         {
161
            gst_pad_remove_probe(ev->audio_buffer_probe_pad, ev->audio_buffer_probe);
162
            gst_object_unref(ev->audio_buffer_probe_pad);
163
            ev->audio_buffer_probe_pad = NULL;
164
            ev->audio_buffer_probe = 0;
165
         }
166

167
       gst_element_set_state(ev->pipeline, GST_STATE_NULL);
168
       g_object_set(G_OBJECT(ev->vsink), "emotion-object", NULL, NULL);
169
       gst_object_unref(ev->pipeline);
170

171
       ev->pipeline = NULL;
172
       ev->vsink = NULL;
173
     }
174

175
   if (ev->metadata)
176
     {
177
        _free_metadata(ev->metadata);
178
        ev->metadata = NULL;
179
     }
180

181
   ev->ready = EINA_FALSE;
182
}
183

184
static void
185
em_del(void *video)
186
{
187
   Emotion_Gstreamer *ev = video;
188

189
   em_file_close(ev);
190

191
   emotion_gstreamer_unref(ev);
192
}
193

194
static void
195
em_play(void   *video,
196
        double  pos EINA_UNUSED)
197
{
198
   Emotion_Gstreamer *ev = video;
199

200
   if (!ev->pipeline) return;
201

202
   if (ev->ready && !ev->buffering)
203
     gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
204
   ev->play = EINA_TRUE;
205
}
206

207
static void
208
em_stop(void *video)
209
{
210
   Emotion_Gstreamer *ev = video;
211

212
   if (!ev->pipeline) return;
213

214
   if (ev->ready)
215
     gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
216
   ev->play = EINA_FALSE;
217
}
218

219
static void
220
em_size_get(void  *video,
221
            int   *width,
222
            int   *height)
223
{
224
   Emotion_Gstreamer *ev = video;
225
   gint cur;
226
   GstPad *pad;
227
   GstCaps *caps;
228
   GstVideoInfo info;
229

230
   if (width) *width = 0;
231
   if (height) *height = 0;
232

233
   if (!ev->ready)
234
     return;
235

236
   g_object_get(ev->pipeline, "current-video", &cur, NULL);
237
   g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
238
   if (!pad)
239
     return;
240

241
   caps = gst_pad_get_current_caps(pad);
242
   gst_object_unref(pad);
243
   if (!caps)
244
     return;
245

246
   gst_video_info_from_caps (&info, caps);
247
   if (width) *width = info.width;
248
   if (height) *height = info.height;
249
   gst_caps_unref(caps);
250
}
251

252
static void
253
em_pos_set(void   *video,
254
           double  pos)
255
{
256
   Emotion_Gstreamer *ev = video;
257

258
   if (!ev->ready) return;
259

260
   gst_element_seek(ev->pipeline, 1.0,
261
                          GST_FORMAT_TIME,
262
                          GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
263
                          GST_SEEK_TYPE_SET,
264
                          (gint64)(pos * (double)GST_SECOND),
265
                          GST_SEEK_TYPE_NONE, -1);
266
}
267

268
static double
269
em_len_get(void *video)
270
{
271
   Emotion_Gstreamer *ev = video;
272
   gint64 val;
273
   gboolean ret;
274

275
   if (!ev->ready)
276
     return 0.0;
277

278
   ret = gst_element_query_duration(ev->pipeline, GST_FORMAT_TIME, &val);
279
   if (!ret || val == -1)
280
     return 0.0;
281

282
   return val / 1000000000.0;
283
}
284

285
static double
286
em_buffer_size_get(void *video)
287
{
288
   Emotion_Gstreamer *ev = video;
289
   GstQuery *query;
290
   gboolean busy;
291
   gint percent;
292

293
   if (!ev->ready) return 0.0;
294

295
   query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
296
   if (gst_element_query(ev->pipeline, query))
297
     gst_query_parse_buffering_percent(query, &busy, &percent);
298
   else
299
     percent = 100;
300

301
   gst_query_unref(query);
302
   return ((float)(percent)) / 100.0;
303
}
304

305
static Eina_Bool
306
_em_fps_get(Emotion_Gstreamer *ev, int *n, int *d)
307
{
308
   gint cur;
309
   GstPad *pad;
310
   GstCaps *caps;
311
   GstVideoInfo info;
312
   Eina_Bool ret = EINA_FALSE;
313

314
   if (n) *n = 0;
315
   if (d) *d = 1;
316

317
   if (!ev->ready)
318
     goto on_error;
319

320
   g_object_get(ev->pipeline, "current-video", &cur, NULL);
321
   g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
322
   if (!pad)
323
     goto on_error;
324

325
   caps = gst_pad_get_current_caps(pad);
326
   gst_object_unref(pad);
327
   if (!caps)
328
     goto on_error;
329

330
   gst_video_info_from_caps (&info, caps);
331
   if (n) *n = info.fps_n;
332
   if (d) *d = info.fps_d;
333
   gst_caps_unref(caps);
334
   ret = EINA_TRUE;
335

336
 on_error:
337

338
   return ret;
339
}
340

341
static int
342
em_fps_num_get(void *video)
343
{
344
   Emotion_Gstreamer *ev = video;
345
   int num;
346

347
   _em_fps_get(ev, &num, NULL);
348

349
   return num;
350
}
351

352
static int
353
em_fps_den_get(void *video)
354
{
355
   Emotion_Gstreamer *ev = video;
356
   int den;
357

358
   _em_fps_get(ev, NULL, &den);
359

360
   return den;
361
}
362

363
static double
364
em_fps_get(void *video)
365
{
366
   Emotion_Gstreamer *ev = video;
367
   int num, den;
368

369
   if (!ev->ready)
370
     return 0.0;
371

372
   _em_fps_get(ev, &num, &den);
373

374
   return (double)num / (double)den;
375
}
376

377
static double
378
em_pos_get(void *video)
379
{
380
   Emotion_Gstreamer *ev = video;
381
   gint64 val;
382
   gboolean ret;
383

384
   if (!ev->ready) return 0.0;
385

386
   ret = gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &val);
387
   if (!ret || val == -1)
388
     return ev->position;
389

390
   ev->position = val / 1000000000.0;
391
   return ev->position;
392
}
393

394
static void
395
em_vis_set(void *video,
396
           Emotion_Vis vis)
397
{
398
   Emotion_Gstreamer *ev = video;
399

400
   ev->vis = vis;
401
}
402

403
static Emotion_Vis
404
em_vis_get(void *video)
405
{
406
   Emotion_Gstreamer *ev = video;
407

408
   return ev->vis;
409
}
410

411
static Eina_Bool
412
em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis)
413
{
414
   const char *name;
415
   GstElementFactory *factory;
416

417
   if (vis == EMOTION_VIS_NONE)
418
     return EINA_TRUE;
419

420
   name = emotion_visualization_element_name_get(vis);
421
   if (!name)
422
     return EINA_FALSE;
423

424
   factory = gst_element_factory_find(name);
425
   if (!factory)
426
     return EINA_FALSE;
427

428
   gst_object_unref(factory);
429
   return EINA_TRUE;
430
}
431

432
static double
433
em_ratio_get(void *video)
434
{
435
   Emotion_Gstreamer *ev = video;
436
   gint cur;
437
   GstPad *pad;
438
   GstCaps *caps;
439
   GstVideoInfo info;
440

441
   info.par_n = info.par_d = 1;
442

443
   if (!ev->ready)
444
     goto on_error;
445

446
   g_object_get(ev->pipeline, "current-video", &cur, NULL);
447
   g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
448
   if (!pad)
449
     goto on_error;
450

451
   caps = gst_pad_get_current_caps(pad);
452
   gst_object_unref(pad);
453
   if (!caps)
454
     goto on_error;
455

456
   gst_video_info_from_caps (&info, caps);
457
   gst_caps_unref(caps);
458

459
 on_error:
460

461
   return (double)info.par_n / (double)info.par_d;
462
}
463

464
static int em_audio_channel_count(void *video);
465
static int em_video_channel_count(void *video);
466

467
static int
468
em_video_handled(void *video)
469
{
470
   Emotion_Gstreamer *ev = video;
471

472
   return em_video_channel_count(ev) > 0 ? 1 : 0;
473
}
474

475
static int
476
em_audio_handled(void *video)
477
{
478
   Emotion_Gstreamer *ev = video;
479

480
   return em_audio_channel_count(ev) > 0 ? 1 : 0;
481
}
482

483
static int
484
em_seekable(void *video)
485
{
486
   Emotion_Gstreamer *ev = video;
487
   GstQuery *query;
488
   int ret = 0;
489
   gboolean seekable;
490

491
   if (!ev->ready) return ret;
492

493
   query = gst_query_new_seeking(GST_FORMAT_TIME);
494
   if (!gst_element_query(ev->pipeline, query))
495
     goto on_error;
496

497
   gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL);
498
   if (!seekable)
499
     goto on_error;
500

501
   ret = 1;
502

503
on_error:
504
   gst_query_unref(query);
505

506
   return ret;
507
}
508

509
static void
510
em_frame_done(void *video EINA_UNUSED)
511
{
512
}
513

514
static Emotion_Format
515
em_format_get(void *video)
516
{
517
   Emotion_Gstreamer *ev = video;
518
   gint cur;
519
   GstPad *pad;
520
   GstCaps *caps;
521
   GstVideoInfo info;
522
   Emotion_Format format = EMOTION_FORMAT_NONE;
523

524
   if (!ev->ready)
525
     goto on_error;
526

527
   g_object_get(ev->pipeline, "current-video", &cur, NULL);
528
   g_signal_emit_by_name (ev->pipeline, "get-video-pad", cur, &pad);
529
   if (!pad)
530
     goto on_error;
531

532
   caps = gst_pad_get_current_caps(pad);
533
   gst_object_unref(pad);
534
   if (!caps)
535
     goto on_error;
536

537
   gst_video_info_from_caps (&info, caps);
538
   gst_caps_unref(caps);
539

540
   switch (info.finfo->format)
541
     {
542
      case GST_VIDEO_FORMAT_I420:
543
         return EMOTION_FORMAT_I420;
544
      case GST_VIDEO_FORMAT_YV12:
545
         return EMOTION_FORMAT_YV12;
546
      case GST_VIDEO_FORMAT_YUY2:
547
         return EMOTION_FORMAT_YUY2;
548
      case GST_VIDEO_FORMAT_ARGB:
549
         /* FIXME: This will be wrong for big endian archs */
550
         return EMOTION_FORMAT_BGRA;
551
      default:
552
         return EMOTION_FORMAT_NONE;
553
     }
554

555
 on_error:
556

557
   return format;
558
}
559

560
static void
561
em_video_data_size_get(void *video, int *w, int *h)
562
{
563
   em_size_get(video, w, h);
564
}
565

566
static int
567
em_yuv_rows_get(void           *video EINA_UNUSED,
568
                int             w EINA_UNUSED,
569
                int             h EINA_UNUSED,
570
                unsigned char **yrows EINA_UNUSED,
571
                unsigned char **urows EINA_UNUSED,
572
                unsigned char **vrows EINA_UNUSED)
573
{
574
   return 0;
575
}
576

577
static int
578
em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED)
579
{
580
   return 0;
581
}
582

583
static void
584
em_event_feed(void *video, int event)
585
{
586
   Emotion_Gstreamer *ev = video;
587
   GstNavigationCommand command;
588

589
   if (!ev->ready) return;
590

591
   switch (event)
592
     {
593
      case EMOTION_EVENT_MENU1:
594
        command = GST_NAVIGATION_COMMAND_MENU1;
595
        break;
596
      case EMOTION_EVENT_MENU2:
597
        command = GST_NAVIGATION_COMMAND_MENU2;
598
        break;
599
      case EMOTION_EVENT_MENU3:
600
        command = GST_NAVIGATION_COMMAND_MENU3;
601
        break;
602
      case EMOTION_EVENT_MENU4:
603
        command = GST_NAVIGATION_COMMAND_MENU4;
604
        break;
605
      case EMOTION_EVENT_MENU5:
606
        command = GST_NAVIGATION_COMMAND_MENU5;
607
        break;
608
      case EMOTION_EVENT_MENU6:
609
        command = GST_NAVIGATION_COMMAND_MENU6;
610
        break;
611
      case EMOTION_EVENT_MENU7:
612
        command = GST_NAVIGATION_COMMAND_MENU7;
613
        break;
614
      case EMOTION_EVENT_UP:
615
        command = GST_NAVIGATION_COMMAND_UP;
616
        break;
617
      case EMOTION_EVENT_DOWN:
618
        command = GST_NAVIGATION_COMMAND_DOWN;
619
        break;
620
      case EMOTION_EVENT_LEFT:
621
        command = GST_NAVIGATION_COMMAND_LEFT;
622
        break;
623
      case EMOTION_EVENT_RIGHT:
624
        command = GST_NAVIGATION_COMMAND_RIGHT;
625
        break;
626
      case EMOTION_EVENT_SELECT:
627
        command = GST_NAVIGATION_COMMAND_ACTIVATE;
628
        break;
629
      case EMOTION_EVENT_NEXT:
630
        /* FIXME */
631
        command = GST_NAVIGATION_COMMAND_RIGHT;
632
        break;
633
      case EMOTION_EVENT_PREV:
634
        /* FIXME */
635
        command = GST_NAVIGATION_COMMAND_LEFT;
636
        break;
637
      case EMOTION_EVENT_ANGLE_NEXT:
638
        command = GST_NAVIGATION_COMMAND_NEXT_ANGLE;
639
        break;
640
      case EMOTION_EVENT_ANGLE_PREV:
641
        command = GST_NAVIGATION_COMMAND_PREV_ANGLE;
642
        break;
643
      case EMOTION_EVENT_FORCE:
644
        /* FIXME */
645
        command = GST_NAVIGATION_COMMAND_ACTIVATE;
646
        break;
647
      case EMOTION_EVENT_0:
648
      case EMOTION_EVENT_1:
649
      case EMOTION_EVENT_2:
650
      case EMOTION_EVENT_3:
651
      case EMOTION_EVENT_4:
652
      case EMOTION_EVENT_5:
653
      case EMOTION_EVENT_6:
654
      case EMOTION_EVENT_7:
655
      case EMOTION_EVENT_8:
656
      case EMOTION_EVENT_9:
657
      case EMOTION_EVENT_10:
658
      default:
659
        return;
660
        break;
661
     }
662

663
  gst_navigation_send_command (GST_NAVIGATION (ev->pipeline), command);
664
}
665

666
static void
667
em_event_mouse_button_feed(void *video, int button, int x, int y)
668
{
669
   Emotion_Gstreamer *ev = video;
670

671
   if (!ev->ready) return;
672

673
   /* FIXME */
674
   gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-press", button, x, y);
675
   gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-release", button, x, y);
676
}
677

678
static void
679
em_event_mouse_move_feed(void *video, int x, int y)
680
{
681
   Emotion_Gstreamer *ev = video;
682

683
   if (!ev->ready) return;
684

685
   gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-move", 0, x, y);
686
}
687

688
/* Video channels */
689
static int
690
em_video_channel_count(void *video)
691
{
692
   Emotion_Gstreamer *ev = video;
693
   gint n;
694

695
   if (!ev->ready) return 0;
696

697
   g_object_get(ev->pipeline, "n-video", &n, NULL);
698

699
   return n;
700
}
701

702
static void
703
em_video_channel_set(void *video,
704
                     int   channel)
705
{
706
   Emotion_Gstreamer *ev = video;
707

708
   if (!ev->ready) return;
709

710
   if (channel < 0) channel = -1;
711

712
   g_object_set (ev->pipeline, "current-video", channel, NULL);
713
}
714

715
static int
716
em_video_channel_get(void *video)
717
{
718
   Emotion_Gstreamer *ev = video;
719
   gint cur;
720

721
   if (!ev->ready) return -1;
722

723
   g_object_get(ev->pipeline, "current-video", &cur, NULL);
724

725
   return cur;
726
}
727

728
static void
729
em_video_subtitle_file_set(void *video,
730
                           const char *filepath)
731
{
732
   Emotion_Gstreamer *ev = video;
733

734
   eina_stringshare_replace(&(ev->subtitle), filepath);
735
}
736

737
static const char *
738
em_video_subtitle_file_get(void *video)
739
{
740
   Emotion_Gstreamer *ev = video;
741

742
   return ev->subtitle;
743
}
744

745
static const char *
746
em_video_channel_name_get(void *video EINA_UNUSED,
747
                          int   channel EINA_UNUSED)
748
{
749
   return NULL;
750
}
751

752
static void
753
em_video_channel_mute_set(void *video,
754
                          int   mute)
755
{
756
   Emotion_Gstreamer *ev = video;
757

758
   ev->video_mute = mute;
759
}
760

761
static int
762
em_video_channel_mute_get(void *video)
763
{
764
   Emotion_Gstreamer *ev = video;
765

766
   return ev->video_mute;
767
}
768

769
/* Audio channels */
770

771
static int
772
em_audio_channel_count(void *video)
773
{
774
   Emotion_Gstreamer *ev = video;
775
   gint n;
776

777
   if (!ev->ready) return 0;
778

779
   g_object_get(ev->pipeline, "n-audio", &n, NULL);
780

781
   return n;
782
}
783

784
static void
785
em_audio_channel_set(void *video,
786
                     int   channel)
787
{
788
   Emotion_Gstreamer *ev = video;
789

790
   if (!ev->ready) return;
791

792
   if (channel < 0) channel = -1;
793

794
   g_object_set (ev->pipeline, "current-audio", channel, NULL);
795
}
796

797
static int
798
em_audio_channel_get(void *video)
799
{
800
   Emotion_Gstreamer *ev = video;
801
   gint cur;
802

803
   if (!ev->ready) return -1;
804

805
   g_object_get(ev->pipeline, "current-audio", &cur, NULL);
806

807
   return cur;
808
}
809

810
static const char *
811
em_audio_channel_name_get(void *video EINA_UNUSED,
812
                          int   channel EINA_UNUSED)
813
{
814
   return NULL;
815
}
816

817
static void
818
em_audio_channel_mute_set(void *video,
819
                          int   mute)
820
{
821
   Emotion_Gstreamer *ev = video;
822

823
   ev->audio_mute = !!mute;
824

825
   if (!ev->pipeline) return;
826

827
   g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
828
}
829

830
static int
831
em_audio_channel_mute_get(void *video)
832
{
833
   Emotion_Gstreamer *ev = video;
834
   gboolean mute;
835

836
   if (!ev->pipeline)
837
     return ev->audio_mute;
838

839
   g_object_get(ev->pipeline, "mute", &mute, NULL);
840

841
   return !!mute;
842
}
843

844
static void
845
em_audio_channel_volume_set(void  *video,
846
                            double vol)
847
{
848
   Emotion_Gstreamer *ev = video;
849

850
   if (vol < 0.0)
851
     vol = 0.0;
852
   ev->volume = vol;
853

854
   if (!ev->pipeline) return;
855

856
   g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
857
}
858

859
static double
860
em_audio_channel_volume_get(void *video)
861
{
862
   Emotion_Gstreamer *ev = video;
863
   gdouble vol;
864

865
   if (!ev->pipeline)
866
     return ev->volume;
867

868
   g_object_get(ev->pipeline, "volume", &vol, NULL);
869

870
   return vol;
871
}
872

873
/* spu stuff */
874

875
static int
876
em_spu_channel_count(void *video)
877
{
878
   Emotion_Gstreamer *ev = video;
879
   gint n;
880

881
   if (!ev->ready) return 0;
882

883
   g_object_get(ev->pipeline, "n-text", &n, NULL);
884

885
   return n;
886
}
887

888
static void
889
em_spu_channel_set(void *video, int channel)
890
{
891
   Emotion_Gstreamer *ev = video;
892

893
   if (!ev->ready) return;
894

895
   if (channel < 0) channel = -1;
896

897
   g_object_set(ev->pipeline, "current-text", channel, NULL);
898
}
899

900
static int
901
em_spu_channel_get(void *video)
902
{
903
   Emotion_Gstreamer *ev = video;
904
   gint cur;
905

906
   if (!ev->ready) return -1;
907

908
   g_object_get(ev->pipeline, "current-text", &cur, NULL);
909

910
   return cur;
911
}
912

913
static const char *
914
em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED)
915
{
916
   return NULL;
917
}
918

919
static void
920
em_spu_channel_mute_set(void *video, int mute)
921
{
922
   Emotion_Gstreamer *ev = video;
923
   gint flags;
924

925
   ev->spu_mute = !!mute;
926

927
   if (!ev->pipeline) return;
928

929
   g_object_get(ev->pipeline, "flags", &flags, NULL);
930
   if (ev->spu_mute) flags &= ~GST_PLAY_FLAG_TEXT;
931
   else flags |= GST_PLAY_FLAG_TEXT;
932
   g_object_set(ev->pipeline, "flags", flags, NULL);
933
}
934

935
static int
936
em_spu_channel_mute_get(void *video)
937
{
938
   Emotion_Gstreamer *ev = video;
939
   gint flags;
940

941
   if (!ev->pipeline) return 0;
942

943
   g_object_get(ev->pipeline, "flags", &flags, NULL);
944

945
   return (flags & GST_PLAY_FLAG_TEXT) ? 0 : 1;
946
}
947

948
static int
949
em_chapter_count(void *video EINA_UNUSED)
950
{
951
   return 0;
952
}
953

954
static void
955
em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED)
956
{
957
}
958

959
static int
960
em_chapter_get(void *video EINA_UNUSED)
961
{
962
   return 0;
963
}
964

965
static const char *
966
em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED)
967
{
968
   return NULL;
969
}
970

971
static void
972
em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED)
973
{
974
}
975

976
static double
977
em_speed_get(void *video EINA_UNUSED)
978
{
979
   return 1.0;
980
}
981

982
static int
983
em_eject(void *video EINA_UNUSED)
984
{
985
   return 1;
986
}
987

988
static void
989
_img_del_cb(void *data, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
990
{
991
   GstBuffer *buffer = data;
992

993
   gst_buffer_unref(buffer);
994
}
995

996
void *
997
em_meta_artwork_get(void *video, Evas_Object *img, const char *path, Emotion_Artwork_Info type)
998
{
999
   Emotion_Gstreamer *ev = video;
1000
   GError *err = NULL;
1001

1002
   if (!ev) return NULL;
1003

1004
   gst_init(NULL,NULL);
1005

1006
   gchar *uri = gst_filename_to_uri(path, NULL);
1007

1008
   GstDiscoverer *discoverer = gst_discoverer_new(10 * GST_SECOND, &err);
1009
   if (!discoverer) return NULL;
1010
   GstDiscovererInfo* info = gst_discoverer_discover_uri(discoverer,
1011
                                 uri, &err);
1012
   if (!info) return NULL;
1013

1014
   int ret = gst_discoverer_info_get_result(info);
1015
   if (ret != GST_DISCOVERER_OK) goto done;
1016

1017
   const GstTagList *tags = gst_discoverer_info_get_tags(info);
1018

1019
   GstSample *sample;
1020
   GstBuffer *buffer;
1021
   GstMapInfo map;
1022

1023
   const gchar *tag = GST_TAG_PREVIEW_IMAGE;
1024
   if (type == EMOTION_ARTWORK_IMAGE) tag = GST_TAG_IMAGE;
1025

1026
   if (gst_tag_list_get_sample(tags, tag, &sample))
1027
     {
1028
        buffer = gst_sample_get_buffer(sample);
1029
        if (!buffer)
1030
          {
1031
             evas_object_del(img);
1032
             img = NULL;
1033
             goto done;
1034
          }
1035

1036
        if (gst_buffer_map(gst_buffer_ref(buffer), &map, GST_MAP_READ))
1037
          {
1038
             evas_object_image_memfile_set(img, map.data, map.size, NULL, NULL);
1039
             evas_object_event_callback_add(img, EVAS_CALLBACK_DEL, _img_del_cb, buffer);
1040
          }
1041
        gst_sample_unref(sample);
1042
     }
1043
   else
1044
     {
1045
        evas_object_del(img);
1046
        img = NULL;
1047
     }
1048

1049
done:
1050
   if (err) g_error_free(err);
1051

1052
   gst_discoverer_info_unref(info);
1053
   g_free(uri);
1054
   g_object_unref(discoverer);
1055

1056
   return img;
1057
}
1058

1059
static const char *
1060
em_meta_get(void *video, int meta)
1061
{
1062
   Emotion_Gstreamer *ev = video;
1063
   const char *str = NULL;
1064

1065
   if (!ev->metadata) return NULL;
1066

1067
   switch (meta)
1068
     {
1069
      case META_TRACK_TITLE:
1070
         str = ev->metadata->title;
1071
         break;
1072
      case META_TRACK_ARTIST:
1073
         str = ev->metadata->artist;
1074
         break;
1075
      case  META_TRACK_ALBUM:
1076
         str = ev->metadata->album;
1077
         break;
1078
      case META_TRACK_YEAR:
1079
         str = ev->metadata->year;
1080
         break;
1081
      case META_TRACK_GENRE:
1082
         str = ev->metadata->genre;
1083
         break;
1084
      case META_TRACK_COMMENT:
1085
         str = ev->metadata->comment;
1086
         break;
1087
      case META_TRACK_DISCID:
1088
         str = ev->metadata->disc_id;
1089
         break;
1090
      default:
1091
         break;
1092
     }
1093

1094
   return str;
1095
}
1096

1097
static void *
1098
em_add(const Emotion_Engine *api,
1099
       Evas_Object *obj,
1100
       const Emotion_Module_Options *opt EINA_UNUSED)
1101
{
1102
   Emotion_Gstreamer *ev;
1103

1104
   ev = calloc(1, sizeof(Emotion_Gstreamer));
1105
   EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL);
1106

1107
   ev->api = api;
1108
   ev->obj = obj;
1109

1110
   ev->ref_count = 1;
1111

1112
   /* Default values */
1113
   ev->vis = EMOTION_VIS_NONE;
1114
   ev->volume = 1.0;
1115
   ev->ready = EINA_FALSE;
1116
   ev->shutdown = EINA_FALSE;
1117
   ev->threads = NULL;
1118
   ev->spu_mute = EINA_TRUE;
1119

1120
   return ev;
1121
}
1122

1123
static const Emotion_Engine em_engine =
1124
{
1125
   EMOTION_ENGINE_API_VERSION,
1126
   EMOTION_ENGINE_PRIORITY_DEFAULT,
1127
   "gstreamer1",
1128
   em_add, /* add */
1129
   em_del, /* del */
1130
   em_file_open, /* file_open */
1131
   em_file_close, /* file_close */
1132
   em_play, /* play */
1133
   em_stop, /* stop */
1134
   em_size_get, /* size_get */
1135
   em_pos_set, /* pos_set */
1136
   em_len_get, /* len_get */
1137
   em_buffer_size_get, /* buffer_size_get */
1138
   em_fps_num_get, /* fps_num_get */
1139
   em_fps_den_get, /* fps_den_get */
1140
   em_fps_get, /* fps_get */
1141
   em_pos_get, /* pos_get */
1142
   em_vis_set, /* vis_set */
1143
   em_vis_get, /* vis_get */
1144
   em_vis_supported, /* vis_supported */
1145
   em_ratio_get, /* ratio_get */
1146
   em_video_handled, /* video_handled */
1147
   em_audio_handled, /* audio_handled */
1148
   em_seekable, /* seekable */
1149
   em_frame_done, /* frame_done */
1150
   em_format_get, /* format_get */
1151
   em_video_data_size_get, /* video_data_size_get */
1152
   em_yuv_rows_get, /* yuv_rows_get */
1153
   em_bgra_data_get, /* bgra_data_get */
1154
   em_event_feed, /* event_feed */
1155
   em_event_mouse_button_feed, /* event_mouse_button_feed */
1156
   em_event_mouse_move_feed, /* event_mouse_move_feed */
1157
   em_video_channel_count, /* video_channel_count */
1158
   em_video_channel_set, /* video_channel_set */
1159
   em_video_channel_get, /* video_channel_get */
1160
   em_video_subtitle_file_set, /* video_subtitle_file_set */
1161
   em_video_subtitle_file_get, /* video_subtitle_file_get */
1162
   em_video_channel_name_get, /* video_channel_name_get */
1163
   em_video_channel_mute_set, /* video_channel_mute_set */
1164
   em_video_channel_mute_get, /* video_channel_mute_get */
1165
   em_audio_channel_count, /* audio_channel_count */
1166
   em_audio_channel_set, /* audio_channel_set */
1167
   em_audio_channel_get, /* audio_channel_get */
1168
   em_audio_channel_name_get, /* audio_channel_name_get */
1169
   em_audio_channel_mute_set, /* audio_channel_mute_set */
1170
   em_audio_channel_mute_get, /* audio_channel_mute_get */
1171
   em_audio_channel_volume_set, /* audio_channel_volume_set */
1172
   em_audio_channel_volume_get, /* audio_channel_volume_get */
1173
   em_spu_channel_count, /* spu_channel_count */
1174
   em_spu_channel_set, /* spu_channel_set */
1175
   em_spu_channel_get, /* spu_channel_get */
1176
   em_spu_channel_name_get, /* spu_channel_name_get */
1177
   em_spu_channel_mute_set, /* spu_channel_mute_set */
1178
   em_spu_channel_mute_get, /* spu_channel_mute_get */
1179
   em_chapter_count, /* chapter_count */
1180
   em_chapter_set, /* chapter_set */
1181
   em_chapter_get, /* chapter_get */
1182
   em_chapter_name_get, /* chapter_name_get */
1183
   em_speed_set, /* speed_set */
1184
   em_speed_get, /* speed_get */
1185
   em_eject, /* eject */
1186
   em_meta_get, /* meta_get */
1187
   NULL, /* priority_set */
1188
   NULL, /* priority_get */
1189
   em_meta_artwork_get,
1190
};
1191

1192
Eina_Bool
1193
gstreamer_module_init(void)
1194
{
1195
   GError *error;
1196

1197
   if (_emotion_init_count > 0)
1198
     {
1199
        _emotion_pending_ecore_begin();
1200
        return EINA_TRUE;
1201
     }
1202

1203
   if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1204

1205
   eina_threads_init();
1206
   eina_log_threads_enable();
1207
   _emotion_gstreamer_log_domain = eina_log_domain_register
1208
     ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1209
   if (_emotion_gstreamer_log_domain < 0)
1210
     {
1211
        EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1212
        return EINA_FALSE;
1213
     }
1214

1215
   if (!gst_init_check(0, NULL, &error))
1216
     {
1217
        EINA_LOG_CRIT("Could not init GStreamer");
1218
        goto error_gst_init;
1219
     }
1220

1221
   if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1222
                                  "emotion-sink",
1223
                                  "video sink plugin for Emotion",
1224
                                  gstreamer_plugin_init,
1225
                                  VERSION,
1226
                                  "LGPL",
1227
                                  "Enlightenment",
1228
                                  PACKAGE,
1229
                                  "http://www.enlightenment.org/") == FALSE)
1230
     {
1231
        EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1232
        goto error_gst_plugin;
1233
     }
1234

1235
   if (!_emotion_module_register(&em_engine))
1236
     {
1237
        ERR("Could not register module %p", &em_engine);
1238
        goto error_register;
1239
     }
1240

1241
   _emotion_init_count = 1;
1242
   return EINA_TRUE;
1243

1244
 error_register:
1245
 error_gst_plugin:
1246

1247
   gst_deinit();
1248

1249
 error_gst_init:
1250
   eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1251
   _emotion_gstreamer_log_domain = -1;
1252

1253
   return EINA_FALSE;
1254
}
1255

1256
void
1257
gstreamer_module_shutdown(void)
1258
{
1259
   if (_emotion_init_count > 1)
1260
     {
1261
        _emotion_init_count--;
1262
        return;
1263
     }
1264
   else if (_emotion_init_count == 0)
1265
     {
1266
        EINA_LOG_ERR("too many gstreamer_module_shutdown()");
1267
        return;
1268
     }
1269
   _emotion_init_count = 0;
1270

1271
   _emotion_module_unregister(&em_engine);
1272

1273
   eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1274
   _emotion_gstreamer_log_domain = -1;
1275

1276
   gst_deinit();
1277
}
1278

1279
#ifndef EMOTION_STATIC_BUILD_GSTREAMER

/* When built as a loadable module, hook the init/shutdown entry points
 * into Eina's module loader; a static build invokes them directly. */
EINA_MODULE_INIT(gstreamer_module_init);
EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);

#endif
1285

1286
static void
1287
_for_each_tag(GstTagList const* list,
1288
                    gchar const* tag,
1289
                    void *data)
1290
{
1291
   Emotion_Gstreamer *ev;
1292
   int i;
1293
   int count;
1294

1295

1296
   ev = (Emotion_Gstreamer*)data;
1297

1298
   if (!ev || !ev->metadata) return;
1299

1300
   count = gst_tag_list_get_tag_size(list, tag);
1301

1302
   for (i = 0; i < count; i++)
1303
     {
1304
        if (!strcmp(tag, GST_TAG_TITLE))
1305
          {
1306
             char *str;
1307
             g_free(ev->metadata->title);
1308
             if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1309
               ev->metadata->title = str;
1310
             else
1311
               ev->metadata->title = NULL;
1312
             break;
1313
          }
1314
        if (!strcmp(tag, GST_TAG_ALBUM))
1315
          {
1316
             gchar *str;
1317
             g_free(ev->metadata->album);
1318
             if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1319
               ev->metadata->album = str;
1320
             else
1321
               ev->metadata->album = NULL;
1322
             break;
1323
          }
1324
        if (!strcmp(tag, GST_TAG_ARTIST))
1325
          {
1326
             gchar *str;
1327
             g_free(ev->metadata->artist);
1328
             if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1329
               ev->metadata->artist = str;
1330
             else
1331
               ev->metadata->artist = NULL;
1332
             break;
1333
          }
1334
        if (!strcmp(tag, GST_TAG_GENRE))
1335
          {
1336
             gchar *str;
1337
             g_free(ev->metadata->genre);
1338
             if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1339
               ev->metadata->genre = str;
1340
             else
1341
               ev->metadata->genre = NULL;
1342
             break;
1343
          }
1344
        if (!strcmp(tag, GST_TAG_COMMENT))
1345
          {
1346
             gchar *str;
1347
             g_free(ev->metadata->comment);
1348
             if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1349
               ev->metadata->comment = str;
1350
             else
1351
               ev->metadata->comment = NULL;
1352
             break;
1353
          }
1354
        if (!strcmp(tag, GST_TAG_DATE))
1355
          {
1356
             gchar *str;
1357
             const GValue *date;
1358
             g_free(ev->metadata->year);
1359
             date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1360
             if (date)
1361
               str = g_strdup_value_contents(date);
1362
             else
1363
               str = NULL;
1364
             ev->metadata->year = str;
1365
             break;
1366
          }
1367

1368
        if (!strcmp(tag, GST_TAG_DATE_TIME))
1369
          {
1370
             gchar *str;
1371
             const GValue *date;
1372
             g_free(ev->metadata->year);
1373
             date = gst_tag_list_get_value_index(list, GST_TAG_DATE_TIME, 0);
1374
             if (date)
1375
               str = g_strdup_value_contents(date);
1376
             else
1377
               str = NULL;
1378
             ev->metadata->year = str;
1379
             break;
1380
          }
1381

1382
        if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1383
          {
1384
             gchar *str;
1385
             const GValue *track;
1386
             g_free(ev->metadata->count);
1387
             track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1388
             if (track)
1389
               str = g_strdup_value_contents(track);
1390
             else
1391
               str = NULL;
1392
             ev->metadata->count = str;
1393
             break;
1394
          }
1395

1396
        if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1397
          {
1398
             gchar *str;
1399
             const GValue *discid;
1400
             g_free(ev->metadata->disc_id);
1401
             discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1402
             if (discid)
1403
               str = g_strdup_value_contents(discid);
1404
             else
1405
               str = NULL;
1406
             ev->metadata->disc_id = str;
1407
             break;
1408
          }
1409
     }
1410

1411
}
1412

1413
static void
1414
_free_metadata(Emotion_Gstreamer_Metadata *m)
1415
{
1416
  if (!m) return;
1417

1418
  g_free(m->title);
1419
  g_free(m->album);
1420
  g_free(m->artist);
1421
  g_free(m->genre);
1422
  g_free(m->comment);
1423
  g_free(m->year);
1424
  g_free(m->count);
1425
  g_free(m->disc_id);
1426

1427
  free(m);
1428
}
1429

1430
/* Main-loop side of the audio buffer probe: emit a "new frame" tick to
 * the Emotion object for audio-only streams (which have no video sink
 * to emit it).  Scheduled from audio_buffer_probe() via
 * ecore_main_loop_thread_safe_call_async(). */
static void
audio_buffer_probe_main(void *data)
{
   Emotion_Gstreamer *ev = data;

   /* Skip the frame event entirely while tearing down */
   if (!ev->shutdown)
     _emotion_frame_new(ev->obj);

   /* Re-arm: let the streaming thread queue the next tick.  Cleared
    * only after the frame event, so at most one call is in flight. */
   g_atomic_int_set(&ev->audio_buffer_probe_pending, 0);

   /* Balance the ref and pending-ecore token taken in audio_buffer_probe() */
   emotion_gstreamer_unref(ev);
   _emotion_pending_ecore_end();
}
1443

1444
/* Pad probe on the audio sink pad (runs on the GStreamer streaming
 * thread).  Each buffer schedules a frame tick on the ecore main loop,
 * throttled via the atomic flag so only one call is pending at a time. */
static GstPadProbeReturn
audio_buffer_probe(GstPad *pad EINA_UNUSED, GstPadProbeInfo *info EINA_UNUSED, gpointer user_data)
{
   Emotion_Gstreamer *ev = user_data;

   /* Don't call too many of these */
   if (!g_atomic_int_compare_and_exchange(&ev->audio_buffer_probe_pending, 0, 1))
     return GST_PAD_PROBE_OK;

   /* Token and ref are released in audio_buffer_probe_main() */
   _emotion_pending_ecore_begin();
   ecore_main_loop_thread_safe_call_async(audio_buffer_probe_main, emotion_gstreamer_ref(ev));

   return GST_PAD_PROBE_OK;
}
1458

1459
static void
1460
_bus_main_handler(void *data)
1461
{
1462
   Emotion_Gstreamer_Message *send;
1463
   Emotion_Gstreamer *ev;
1464
   GstMessage              *msg;
1465

1466
   send = data;
1467
   ev = send->ev;
1468
   msg = send->msg;
1469

1470
   /* Just exit immediately if we're shutting down */
1471
   if (ev->shutdown)
1472
     {
1473
        emotion_gstreamer_message_free(send);
1474
        _emotion_pending_ecore_end();
1475
        return;
1476
     }
1477

1478
   switch (GST_MESSAGE_TYPE(msg))
1479
     {
1480
      case GST_MESSAGE_EOS:
1481
         ev->play = EINA_FALSE;
1482
         _emotion_decode_stop(ev->obj);
1483
         _emotion_playback_finished(ev->obj);
1484
         break;
1485
      case GST_MESSAGE_TAG:
1486
        {
1487
           GstTagList *new_tags;
1488
           gst_message_parse_tag(msg, &new_tags);
1489
           if (new_tags)
1490
             {
1491
                gst_tag_list_foreach(new_tags,
1492
                                     (GstTagForeachFunc)_for_each_tag,
1493
                                     ev);
1494
                gst_tag_list_free(new_tags);
1495
             }
1496
           _emotion_title_set(ev->obj, ev->metadata->title);
1497
           break;
1498
        }
1499
      case GST_MESSAGE_ASYNC_DONE:
1500
         _emotion_seek_done(ev->obj);
1501
         break;
1502
      case GST_MESSAGE_STATE_CHANGED:
1503
        {
1504
           GstState old_state, new_state;
1505

1506
           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
1507
           INF("Element %s changed state from %s to %s.",
1508
               GST_OBJECT_NAME(msg->src),
1509
               gst_element_state_get_name(old_state),
1510
               gst_element_state_get_name(new_state));
1511

1512
           if (GST_MESSAGE_SRC(msg) == GST_OBJECT(ev->pipeline) && new_state >= GST_STATE_PAUSED && !ev->ready)
1513
             {
1514
                gint n_audio, n_video;
1515

1516
                ev->ready = EINA_TRUE;
1517

1518
                g_object_get(G_OBJECT(ev->pipeline),
1519
                  "n-audio", &n_audio,
1520
                  "n-video", &n_video,
1521
                  NULL);
1522

1523
                if (n_audio == 0 && n_video == 0)
1524
                  ERR("No audio nor video stream found");
1525

1526
                if (n_audio > 0 && n_video == 0)
1527
                  {
1528
                     GstElement *vis = NULL;
1529
                     gint flags;
1530
                     const char *vis_name;
1531

1532
                     if ((vis_name = emotion_visualization_element_name_get(ev->vis)))
1533
                       {
1534
                          vis = gst_element_factory_make(vis_name, "vis");
1535
                          g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1536
                          g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1537
                          flags |= GST_PLAY_FLAG_VIS;
1538
                          g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1539
                       }
1540
                     else
1541
                       {
1542
                          GstElement *audio_sink;
1543

1544
                          g_object_get(ev->pipeline, "audio-sink", &audio_sink, NULL);
1545
                          ev->audio_buffer_probe_pad = gst_element_get_static_pad(audio_sink, "sink");
1546
                          ev->audio_buffer_probe = gst_pad_add_probe(ev->audio_buffer_probe_pad,
1547
                                                                     GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST,
1548
                                                                     audio_buffer_probe,
1549
                                                                     ev,
1550
                                                                     NULL);
1551
                          gst_object_unref(audio_sink);
1552
                       }
1553
                  }
1554

1555
                if (n_audio > 0 || n_video > 0)
1556
                  {
1557
                     /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1558
                     /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1559

1560
#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1561
                     if (getuid() == geteuid())
1562
#endif
1563
                       {
1564
                          if (getenv("EMOTION_GSTREAMER_DOT"))
1565
                            {
1566
                               GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1567
                                                                 GST_DEBUG_GRAPH_SHOW_ALL,
1568
                                                                 getenv("EMOTION_GSTREAMER_DOT"));
1569
                            }
1570
                       }
1571

1572
                     _emotion_open_done(ev->obj);
1573
                     _emotion_playback_started(ev->obj);
1574
                  }
1575
             }
1576
           break;
1577
        }
1578
      case GST_MESSAGE_ERROR:
1579
        {
1580
           GError *err = NULL;
1581
           gchar *name, *debug = NULL;
1582

1583
           name = gst_object_get_path_string (msg->src);
1584
           gst_message_parse_error (msg, &err, &debug);
1585

1586
           ERR("ERROR: from element %s: %s\nAdditional debug info:\n%s", name, err->message, debug);
1587

1588
           g_error_free (err);
1589
           g_free (debug);
1590
           g_free (name);
1591

1592
           gst_element_set_state(ev->pipeline, GST_STATE_NULL);
1593

1594
           ev->play = EINA_FALSE;
1595
           _emotion_decode_stop(ev->obj);
1596
           _emotion_playback_finished(ev->obj);
1597

1598
           break;
1599
        }
1600
      case GST_MESSAGE_WARNING:
1601
        {
1602
           GError *err = NULL;
1603
           gchar *name, *debug = NULL;
1604

1605
           name = gst_object_get_path_string (msg->src);
1606
           gst_message_parse_warning (msg, &err, &debug);
1607

1608
           WRN("WARNING: from element %s: %s\nAdditional debug info:\n%s", name, err->message, debug);
1609

1610
           g_error_free (err);
1611
           g_free (debug);
1612
           g_free (name);
1613

1614
           break;
1615
        }
1616
      case GST_MESSAGE_BUFFERING:
1617
        {
1618
           gint percent = 0;
1619

1620
           /* If the stream is live, we do not care about buffering. */
1621
           if (ev->live)
1622
             {
1623
                ev->buffering = FALSE;
1624
                break;
1625
             }
1626

1627
           gst_message_parse_buffering (msg, &percent);
1628

1629
           /* Wait until buffering is complete before start/resume playing */
1630
           if (percent < 100)
1631
             gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
1632
           else if (ev->play)
1633
             gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
1634

1635
           ev->buffering = (percent < 100);
1636

1637
           break;
1638
        }
1639
      case GST_MESSAGE_CLOCK_LOST:
1640
        {
1641
           gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
1642
           gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
1643
           break;
1644
        }
1645
      default:
1646
         break;
1647
     }
1648

1649
   emotion_gstreamer_message_free(send);
1650
   _emotion_pending_ecore_end();
1651
}
1652

1653
static GstBusSyncReply
1654
_bus_sync_handler(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
1655
{
1656
   Emotion_Gstreamer *ev = data;
1657
   Emotion_Gstreamer_Message *send;
1658

1659
   INF("Message %s from %s",
1660
       GST_MESSAGE_TYPE_NAME(msg),
1661
       GST_MESSAGE_SRC_NAME(msg));
1662

1663
   send = emotion_gstreamer_message_alloc(ev, msg);
1664

1665
   if (send)
1666
     {
1667
        _emotion_pending_ecore_begin();
1668
        ecore_main_loop_thread_safe_call_async(_bus_main_handler, send);
1669
     }
1670

1671
   gst_message_unref(msg);
1672

1673
   return GST_BUS_DROP;
1674
}
1675

1676
static void
1677
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
1678
{
1679
   Emotion_Gstreamer *ev = data;
1680
   gboolean res;
1681

1682
   if (ecore_thread_check(thread) || !ev->pipeline) return;
1683

1684
   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
1685
   res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1686
   if (res == GST_STATE_CHANGE_NO_PREROLL)
1687
     {
1688
        ev->live = EINA_TRUE;
1689
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1690
        gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1691
     }
1692
}
1693

1694
static void
1695
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
1696
{
1697
   Emotion_Gstreamer *ev = data;
1698

1699
   ev->threads = eina_list_remove(ev->threads, thread);
1700

1701
#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1702
   if (getuid() == geteuid())
1703
#endif
1704
     {
1705
        if (getenv("EMOTION_GSTREAMER_DOT"))
1706
          {
1707
             GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1708
                                               GST_DEBUG_GRAPH_SHOW_ALL,
1709
                                               getenv("EMOTION_GSTREAMER_DOT"));
1710
          }
1711
     }
1712

1713
   emotion_gstreamer_unref(ev);
1714
}
1715

1716
static void
1717
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
1718
{
1719
   Emotion_Gstreamer *ev = data;
1720

1721
   ev->threads = eina_list_remove(ev->threads, thread);
1722

1723
   if (ev->play && !ev->buffering)
1724
     {
1725
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1726
     }
1727

1728
#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1729
   if (getuid() == geteuid())
1730
#endif
1731
     {
1732
        if (getenv("EMOTION_GSTREAMER_DOT"))
1733
          {
1734
             GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1735
                                               GST_DEBUG_GRAPH_SHOW_ALL,
1736
                                               getenv("EMOTION_GSTREAMER_DOT"));
1737
          }
1738
     }
1739

1740
   emotion_gstreamer_unref(ev);
1741
}
1742

1743
static GstElement *
1744
_create_pipeline(Emotion_Gstreamer *ev,
1745
                 Evas_Object *o,
1746
                 const char *uri,
1747
                 const char *suburi)
1748
{
1749
   GstElement *playbin;
1750
   GstElement *vsink;
1751
   GstBus *bus;
1752
   int flags;
1753

1754
   if (!uri)
1755
     return NULL;
1756

1757
   playbin = gst_element_factory_make("playbin", "playbin");
1758
   if (!playbin)
1759
     {
1760
        ERR("Unable to create 'playbin' GstElement.");
1761
        return NULL;
1762
     }
1763

1764
   vsink = gst_element_factory_make("emotion-sink", "sink");
1765
   if (!vsink)
1766
     {
1767
        ERR("Unable to create 'emotion-sink' GstElement.");
1768
        goto unref_pipeline;
1769
     }
1770

1771
   g_object_set(G_OBJECT(vsink), "emotion-object", o, NULL);
1772

1773
   g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1774
   if (ev->spu_mute) flags &= ~GST_PLAY_FLAG_TEXT;
1775
   else flags |= GST_PLAY_FLAG_TEXT;
1776
   g_object_set(G_OBJECT(playbin), "flags", (flags | GST_PLAY_FLAG_DOWNLOAD), NULL);
1777
   g_object_set(G_OBJECT(playbin), "video-sink", vsink, NULL);
1778
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1779
   if (suburi)
1780
     {
1781
        g_object_set(G_OBJECT(playbin), "suburi", suburi, NULL);
1782
        g_object_set(G_OBJECT(playbin), "subtitle-font-desc", "Sans, 10", NULL);
1783
     }
1784

1785
   bus = gst_element_get_bus(playbin);
1786
   gst_bus_set_sync_handler(bus, _bus_sync_handler, ev, NULL);
1787
   gst_object_unref(bus);
1788

1789
   ev->pipeline = playbin;
1790
   ev->vsink = vsink;
1791

1792
   ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
1793

1794
   ev->threads = eina_list_append(ev->threads,
1795
                                  ecore_thread_run(_emotion_gstreamer_pause,
1796
                                                   _emotion_gstreamer_end,
1797
                                                   _emotion_gstreamer_cancel,
1798
                                                   emotion_gstreamer_ref(ev)));
1799

1800
   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1801
   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1802
#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1803
   if (getuid() == geteuid())
1804
#endif
1805
     {
1806
        if (getenv("EMOTION_GSTREAMER_DOT"))
1807
          {
1808
             GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin),
1809
                                               GST_DEBUG_GRAPH_SHOW_ALL,
1810
                                               getenv("EMOTION_GSTREAMER_DOT"));
1811
          }
1812
     }
1813

1814
   return playbin;
1815

1816
 unref_pipeline:
1817
   gst_object_unref(vsink);
1818
   gst_object_unref(playbin);
1819
   return NULL;
1820
}
1821

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.