emotion_smart.c 
2062 lines · 55.7 KB
1
#ifdef HAVE_CONFIG_H
2
# include "config.h"
3
#endif
4

5
#define EFL_CANVAS_OBJECT_PROTECTED
6
#define EFL_CANVAS_GROUP_PROTECTED
7

8
#include <Evas.h>
9
#include <Ecore.h>
10

11
#ifdef HAVE_EIO
12
# include <math.h>
13
# include <Eio.h>
14
#endif
15

16
#define EFL_INTERNAL_UNSTABLE
17
#include <Evas_Internal.h>
18

19
#include "Emotion.h"
20
#include "emotion_private.h"
21

22
#include "canvas/evas_canvas_eo.h"
23

24
#ifdef _WIN32
25
# define FMT_UCHAR "%c"
26
#else
27
# define FMT_UCHAR "%hhu"
28
#endif
29

30
#define E_SMART_OBJ_GET(smart, o, type) \
31
     { \
32
        if (!o) return; \
33
        if (!efl_isa(o, MY_CLASS)) { \
34
             ERR("Tried calling on a non-emotion object."); \
35
             return; \
36
        } \
37
        smart = efl_data_scope_get(o, MY_CLASS); \
38
        if (!smart) return; \
39
     }
40

41
#define E_SMART_OBJ_GET_RETURN(smart, o, type, ret) \
42
   { \
43
      if (!o) return ret; \
44
      if (!efl_isa(o, MY_CLASS)) { \
45
           ERR("Tried calling on a non-emotion object."); \
46
           return ret; \
47
      } \
48
      smart = efl_data_scope_get(o, MY_CLASS); \
49
      if (!smart) return ret; \
50
   }
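
/* A minimal usage sketch (illustrative only): both macros fetch the
 * Efl_Canvas_Video_Data attached to an emotion object and bail out early
 * when the object is NULL, is not an emotion object, or has no smart data:
 *
 *   EMOTION_API void
 *   emotion_object_example_call(Evas_Object *obj)   // hypothetical name
 *   {
 *      Efl_Canvas_Video_Data *sd;
 *
 *      E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
 *      if (!sd->engine_instance) return;
 *      // ... work with sd ...
 *   }
 */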
51

52
#define E_OBJ_NAME "efl_canvas_video"
53

54
#ifdef MY_CLASS
55
# undef MY_CLASS
56
#endif
57

58
#define MY_CLASS EFL_CANVAS_VIDEO_CLASS
59

60
typedef struct _Efl_Canvas_Video_Data Efl_Canvas_Video_Data;
61
typedef struct _Emotion_Xattr_Data Emotion_Xattr_Data;
62

63
struct _Efl_Canvas_Video_Data
64
{
65
   Emotion_Engine_Instance *engine_instance;
66

67
   const char    *engine;
68
   const char    *file;
69
   Evas_Object   *obj;
70
   Evas_Object   *bg;
71

72
   Ecore_Job     *job;
73

74
   Emotion_Xattr_Data *xattr;
75

76
   const char *title;
77

78
   struct {
79
      const char *info;
80
      double  stat;
81
   } progress;
82
   struct {
83
      const char *file;
84
      int   num;
85
   } ref;
86
   struct {
87
      int button_num;
88
      int button;
89
   } spu;
90
   struct {
91
      int l; /* left */
92
      int r; /* right */
93
      int t; /* top */
94
      int b; /* bottom */
95
      Evas_Object *clipper;
96
   } crop;
97

98
   struct {
99
      int         w, h;
100
   } video;
101
   struct {
102
      double      w, h;
103
   } fill;
104

105
   double         ratio;
106
   double         pos;
107
   double         remember_jump;
108
   double         seek_pos;
109
   double         len;
110

111
   Emotion_Module_Options module_options;
112

113
   Emotion_Suspend state;
114
   Emotion_Aspect aspect;
115

116
   Ecore_Animator *anim;
117

118
   Eina_Bool open : 1;
119
   Eina_Bool play : 1;
120
   Eina_Bool pause : 1;
121
   Eina_Bool remember_play : 1;
122
   Eina_Bool seek : 1;
123
   Eina_Bool seeking : 1;
124
   Eina_Bool loaded : 1;
125
};
126

127
struct _Emotion_Xattr_Data
128
{
129
   EINA_REFCOUNT;
130
   Eo       *obj_wref;
131
#ifdef HAVE_EIO
132
   Eio_File *load;
133
   Eio_File *save;
134
#endif
135
};
136

137
static void _mouse_move(void *data, Evas *ev, Evas_Object *obj, void *event_info);
138
static void _mouse_down(void *data, Evas *ev, Evas_Object *obj, void *event_info);
139
static void _pos_set_job(void *data);
140
static void _pixels_get(void *data, Evas_Object *obj);
141

142
static void
143
_engine_init(Eo *obj, Efl_Canvas_Video_Data *sd)
144
{
145
   if (sd->engine_instance) return;
146
   sd->engine_instance = emotion_engine_instance_new(sd->engine, obj,
147
                                                     &(sd->module_options));
148
}
149

150
static void
151
_emotion_image_data_zero(Evas_Object *img)
152
{
153
   void *data = NULL;
154

155
   data = evas_object_image_data_get(img, 1);
156
   if (data)
157
     {
158
        int w, h, sz = 0;
159
        Evas_Colorspace cs;
160

161
        evas_object_image_size_get(img, &w, &h);
162
        cs = evas_object_image_colorspace_get(img);
163
        if (cs == EVAS_COLORSPACE_ARGB8888)
164
           sz = w * h * 4;
165
        if ((cs == EVAS_COLORSPACE_YCBCR422P601_PL) ||
166
            (cs == EVAS_COLORSPACE_YCBCR422P709_PL))
167
           sz = h * 2 * sizeof(unsigned char *);
168
        if (sz != 0) memset(data, 0, sz);
169
     }
170
   evas_object_image_data_set(img, data);
171
}
172

173
static void
174
_xattr_data_cancel(Emotion_Xattr_Data *xattr)
175
{
176
   (void) xattr;
177
#ifdef HAVE_EIO
178
   /* Only cancel the load_xattr or we will loose ref to time_seek stringshare */
179
   if (xattr->load) eio_file_cancel(xattr->load);
180
   xattr->load = NULL;
181
   if (xattr->save) eio_file_cancel(xattr->save);
182
   xattr->save = NULL;
183
#endif
184
}
185

186
static void
187
_xattr_data_unref(Emotion_Xattr_Data *xattr)
188
{
189
   EINA_REFCOUNT_UNREF(xattr) {} else return;
190

191
   _xattr_data_cancel(xattr);
192
   efl_wref_del_safe(&xattr->obj_wref);
193
   free(xattr);
194
}
195

196
static void
197
_clipper_position_size_update(Evas_Object *obj, int x, int y, int w, int h, int vid_w, int vid_h)
198
{
199
   Efl_Canvas_Video_Data *sd;
200
   double scale_w, scale_h;
201

202
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
203

204
   if (vid_w == 0 || vid_h == 0)
205
     {
206
       evas_object_image_fill_set(sd->obj, 0, 0, 0, 0);
207
       evas_object_move(sd->obj, x, y);
208
       evas_object_resize(sd->obj, 0, 0);
209
       if (!sd->crop.clipper) return;
210
       evas_object_move(sd->crop.clipper, x, y);
211
       evas_object_resize(sd->crop.clipper, 0, 0);
212
     }
213
   else
214
     {
215
       scale_w = (double)w / (double)(vid_w - sd->crop.l - sd->crop.r);
216
       scale_h = (double)h / (double)(vid_h - sd->crop.t - sd->crop.b);
217

218
       if (sd->fill.w < 0 && sd->fill.h < 0)
219
         evas_object_image_fill_set(sd->obj, 0, 0, vid_w * scale_w, vid_h * scale_h);
220
       else
221
         evas_object_image_fill_set(sd->obj, 0, 0, sd->fill.w * w, sd->fill.h * h);
222
       evas_object_resize(sd->obj, vid_w * scale_w, vid_h * scale_h);
223
       evas_object_move(sd->obj, x - sd->crop.l * scale_w, y - sd->crop.t * scale_h);
224
       if (!sd->crop.clipper) return;
225
       evas_object_move(sd->crop.clipper, x, y);
226
       evas_object_resize(sd->crop.clipper, w, h);
227
     }
228
}
229

230
/*******************************/
231
/* Externally accessible calls */
232
/*******************************/
233

234

235

236
EMOTION_API Evas_Object *
237
emotion_object_add(Evas *evas)
238
{
239
   evas = evas_find(evas);
240
   EINA_SAFETY_ON_FALSE_RETURN_VAL(efl_isa(evas, EVAS_CANVAS_CLASS), NULL);
241
   return efl_add(MY_CLASS, evas, efl_canvas_object_legacy_ctor(efl_added));
242
}
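
/* Typical application-side usage (a minimal sketch, not part of this file;
 * the path below is a placeholder):
 *
 *   Evas_Object *em = emotion_object_add(evas);
 *   emotion_object_file_set(em, "/path/to/video.ogv");
 *   evas_object_move(em, 0, 0);
 *   evas_object_resize(em, 640, 480);
 *   evas_object_show(em);
 *   emotion_object_play_set(em, EINA_TRUE);
 */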
243

244
EOLIAN static Eo *
245
_efl_canvas_video_efl_object_constructor(Eo *obj, Efl_Canvas_Video_Data *pd)
246
{
247
   efl_canvas_group_clipped_set(obj, EINA_TRUE);
248
   obj = efl_constructor(efl_super(obj, MY_CLASS));
249
   efl_canvas_object_type_set(obj, E_OBJ_NAME);
250

251
   eina_stringshare_replace(&(pd->engine), "gstreamer1");
252
   pd->spu.button = -1;
253
   pd->ratio = 1.0;
254
   _engine_init(obj, pd);
255

256
   return obj;
257
}
258

259
EMOTION_API Evas_Object *
260
emotion_object_image_get(const Evas_Object *obj)
261
{
262
   Efl_Canvas_Video_Data *sd = efl_data_scope_safe_get(obj, MY_CLASS);
263
   if (!sd) return NULL;
264
   return sd->obj;
265
}
266

267
EOLIAN static void
268
_efl_canvas_video_option_set(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *pd, const char *opt, const char *val)
269
{
270
   Efl_Canvas_Video_Data *sd = pd;
271

272
   if ((!opt) || (!val)) return;
273

274
   if (strcmp(opt, "video") == 0)
275
     {
276
        if (strcmp(val, "off") == 0)
277
          sd->module_options.no_video = EINA_TRUE;
278
        else if (strcmp(val, "on") == 0)
279
          sd->module_options.no_video = EINA_FALSE;
280
        else
281
          sd->module_options.no_video = !!atoi(val);
282

283
        ERR("Deprecated. Use emotion_object_video_mute_set()");
284
     }
285
   else if (strcmp(opt, "audio") == 0)
286
     {
287
        if (strcmp(val, "off") == 0)
288
          sd->module_options.no_audio = EINA_TRUE;
289
        else if (strcmp(val, "on") == 0)
290
          sd->module_options.no_audio = EINA_FALSE;
291
        else
292
          sd->module_options.no_audio = !!atoi(val);
293

294
        ERR("Deprecated. Use emotion_object_audio_mute_set()");
295
     }
296
   else
297
     ERR("Unsupported %s=%s", opt, val);
298
}
299

300
EOLIAN static Eina_Bool
301
_efl_canvas_video_engine_set(Eo *obj, Efl_Canvas_Video_Data *pd, const char *engine)
302
{
303
   Efl_Canvas_Video_Data *sd = pd;
304
   const char *file;
305

306
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
307

308
   if (!engine) engine = "gstreamer1";
309
   if (!strcmp(engine, sd->engine)) return EINA_TRUE;
310

311
   eina_stringshare_replace(&(sd->engine), engine);
312

313
   file = sd->file;
314
   sd->file = NULL;
315

316
   eina_stringshare_del(sd->title);
317
   sd->title = NULL;
318
   eina_stringshare_del(sd->progress.info);
319
   sd->progress.info = NULL;
320
   sd->progress.stat = 0.0;
321
   eina_stringshare_del(sd->ref.file);
322
   sd->ref.file = NULL;
323
   sd->ref.num = 0;
324
   sd->spu.button_num = 0;
325
   sd->spu.button = -1;
326
   sd->ratio = 1.0;
327
   sd->pos = 0;
328
   sd->remember_jump = 0;
329
   sd->seek_pos = 0;
330
   sd->len = 0;
331
   sd->remember_play = 0;
332

333
   if (sd->anim) ecore_animator_del(sd->anim);
334
   sd->anim = NULL;
335

336
   if (sd->engine_instance) emotion_engine_instance_del(sd->engine_instance);
337
   sd->engine_instance = NULL;
338
   _engine_init(obj, sd);
339
   if (!sd->engine_instance)
340
     {
341
        sd->file = file;
342
        return EINA_FALSE;
343
     }
344

345
   if (file)
346
     {
347
        emotion_object_file_set(obj, file);
348
        eina_stringshare_del(file);
349
     }
350

351
   return EINA_TRUE;
352
}
353

354
EMOTION_API Eina_Bool
355
emotion_object_file_set(Evas_Object *obj, const char *file)
356
{
357
   return efl_file_simple_load(obj, file, NULL);
358
}
359

360
EOLIAN static Eina_Error
361
_efl_canvas_video_efl_file_file_set(Eo *obj, Efl_Canvas_Video_Data *sd, const char *file)
362
{
363
   DBG("file=%s", file);
364

365
   eina_stringshare_replace(&sd->file, file);
366
   sd->loaded = 0;
367
   return efl_file_set(efl_super(obj, MY_CLASS), file);
368
}
369

370
EOLIAN static Eina_Bool
371
_efl_canvas_video_efl_file_loaded_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
372
{
373
   return sd->open && sd->loaded;
374
}
375

376
EOLIAN static void
377
_efl_canvas_video_efl_file_unload(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
378
{
379
    if (sd->engine_instance) emotion_engine_instance_file_close(sd->engine_instance);
380
    sd->engine_instance = NULL;
381
    evas_object_image_data_set(sd->obj, NULL);
382
    evas_object_image_size_set(sd->obj, 1, 1);
383
    _emotion_image_data_zero(sd->obj);
384

385
   if (sd->anim) ecore_animator_del(sd->anim);
386
   sd->anim = NULL;
387

388
   _xattr_data_cancel(sd->xattr);
389
   sd->loaded = 0;
390
}
391

392
EOLIAN static Eina_Error
393
_efl_canvas_video_efl_file_load(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
394
{
395
   const char *file = sd->file;
396
   if (!sd->engine_instance) _engine_init(obj, sd);
397
   if (!sd->engine_instance)
398
     {
399
        WRN("No engine chosen. Please set an engine.");
400
        return EFL_GFX_IMAGE_LOAD_ERROR_GENERIC;
401
     }
402

403
   sd->video.w = 0;
404
   sd->video.h = 0;
405
   if ((file) && (file[0] != 0))
406
     {
407
        char *file2 = NULL;
408

409
        emotion_engine_instance_file_close(sd->engine_instance);
410
        evas_object_image_data_set(sd->obj, NULL);
411
        evas_object_image_size_set(sd->obj, 1, 1);
412
        _emotion_image_data_zero(sd->obj);
413
        sd->open = 0;
414

415
        if (file)
416
          {
417
             file2 = eina_vpath_resolve(file);
418
          }
419

420
        if (!emotion_engine_instance_file_open(sd->engine_instance, file2))
421
          {
422
             WRN("Couldn't open file=%s", sd->file);
423
             free(file2); /* do not leak the resolved path on failure */
             return EFL_GFX_IMAGE_LOAD_ERROR_GENERIC;
424
          }
425
        free(file2);
426
        DBG("successfully opened file=%s", sd->file);
427
        sd->pos = 0.0;
428
        if (sd->play) emotion_engine_instance_play(sd->engine_instance, 0.0);
429
     }
430
   else
431
     {
432
        emotion_engine_instance_file_close(sd->engine_instance);
433
        evas_object_image_data_set(sd->obj, NULL);
434
        evas_object_image_size_set(sd->obj, 1, 1);
435
        _emotion_image_data_zero(sd->obj);
436
     }
437

438
   if (sd->anim) ecore_animator_del(sd->anim);
439
   sd->anim = NULL;
440

441
   _xattr_data_cancel(sd->xattr);
442
   sd->loaded = 1;
443

444
   return 0;
445
}
446

447
EMOTION_API const char *
448
emotion_object_file_get(const Evas_Object *obj)
449
{
450
   return efl_file_get(obj);
451
}
452

453
static void
454
_emotion_aspect_borders_apply(Evas_Object *obj, Efl_Canvas_Video_Data *sd, int w, int h, int iw, int ih)
455
{
456
   int x, y;
457

458
   evas_object_geometry_get(obj, &x, &y, NULL, NULL);
459

460
   /* applying calculated borders */
461
   if ((sd->crop.l == 0) && (sd->crop.r == 0) &&
462
       (sd->crop.t == 0) && (sd->crop.b == 0))
463
     {
464
        Evas_Object *old_clipper;
465
        if (sd->crop.clipper)
466
          {
467
             old_clipper = evas_object_clip_get(sd->crop.clipper);
468
             evas_object_clip_unset(sd->obj);
469
             evas_object_clip_set(sd->obj, old_clipper);
470
             evas_object_del(sd->crop.clipper);
471
             sd->crop.clipper = NULL;
472
          }
473
     }
474
   else
475
     {
476
        if (!sd->crop.clipper)
477
          {
478
             Evas_Object *old_clipper;
479
             sd->crop.clipper = evas_object_rectangle_add
480
               (evas_object_evas_get(obj));
481
             evas_object_smart_member_add(sd->crop.clipper, obj);
482
             old_clipper = evas_object_clip_get(sd->obj);
483
             evas_object_clip_set(sd->obj, sd->crop.clipper);
484
             evas_object_clip_set(sd->crop.clipper, old_clipper);
485
             evas_object_show(sd->crop.clipper);
486
          }
487
     }
488
   _clipper_position_size_update(obj, x, y, w, h, iw, ih);
489
}
490

491
static void
492
_efl_canvas_video_aspect_border_apply(Evas_Object *obj, Efl_Canvas_Video_Data *sd, int w, int h)
493
{
494
   int iw, ih;
495
   double ir;
496
   double r;
497

498
   int aspect_opt = 0;
499

500
   /* Prefer (if available) the video aspect ratio to calculate the sizes */
501
   if (sd->ratio > 0.0)
502
     {
503
        ir = sd->ratio;
504
        ih = sd->video.h;
505
        iw = (double)ih * ir;
506
     }
507
   else
508
     {
509
        iw = sd->video.w;
510
        ih = sd->video.h;
511
        ir = (double)iw / ih;
512
     }
513

514
   r = (double)w / h;
515

516
   /* First check if we should fit the width or height of the video inside the
517
    * width/height of the object.  This check takes into account the original
518
    * aspect ratio and the object aspect ratio, whether we are keeping both sizes or
519
    * cropping the exceeding area.
520
    */
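   /* Worked example (illustrative, not taken from a real stream): for a
    * 1920x1080 source shown in a 400x400 object, ir ~= 1.78 and r = 1.0.
    * EMOTION_ASPECT_CROP then picks aspect_opt = 2 below, so
    * scale = 1080 / 400 = 2.7, tw = 400 * 2.7 = 1080 and
    * crop.l = crop.r = (1920 - 1080) / 2 = 420, i.e. 420 source pixels are
    * cropped away on each side.  EMOTION_ASPECT_KEEP_BOTH with the same
    * geometry picks aspect_opt = 1 instead, yielding
    * crop.t = crop.b = (1080 - 1920) / 2 = -420; the negative values act
    * as letterbox borders. */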
521
   if (sd->aspect == EMOTION_ASPECT_KEEP_NONE)
522
     {
523
        sd->crop.l = 0;
524
        sd->crop.r = 0;
525
        sd->crop.t = 0;
526
        sd->crop.b = 0;
527
        aspect_opt = 0; // just ignore keep_aspect
528
     }
529
   else if (sd->aspect == EMOTION_ASPECT_KEEP_WIDTH)
530
     {
531
        aspect_opt = 1;
532
     }
533
   else if (sd->aspect == EMOTION_ASPECT_KEEP_HEIGHT)
534
     {
535
        aspect_opt = 2;
536
     }
537
   else if (sd->aspect == EMOTION_ASPECT_KEEP_BOTH)
538
     {
539
        if (ir > r) aspect_opt = 1;
540
        else aspect_opt = 2;
541
     }
542
   else if (sd->aspect == EMOTION_ASPECT_CROP)
543
     {
544
        if (ir > r) aspect_opt = 2;
545
        else aspect_opt = 1;
546
     }
547
   else if (sd->aspect == EMOTION_ASPECT_CUSTOM)
548
     {
549
        // nothing to do, just respect the border settings
550
        aspect_opt = 0;
551
     }
552

553
   /* updating borders based on keep_aspect settings */
554
   if (aspect_opt == 1) // keep width
555
     {
556
        int th, dh;
557
        double scale;
558

559
        sd->crop.l = 0;
560
        sd->crop.r = 0;
561
        scale = (double)iw / w;
562
        th = h * scale;
563
        dh = ih - th;
564
        sd->crop.t = sd->crop.b = dh / 2;
565
     }
566
   else if (aspect_opt == 2) // keep height
567
     {
568
        int tw, dw;
569
        double scale;
570

571
        sd->crop.t = 0;
572
        sd->crop.b = 0;
573
        scale = (double)ih / h;
574
        tw = w * scale;
575
        dw = iw - tw;
576
        sd->crop.l = sd->crop.r = dw / 2;
577
     }
578

579
   _emotion_aspect_borders_apply(obj, sd, w, h, iw, ih);
580
}
581

582
EMOTION_API void
583
emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b)
584
{
585
   Efl_Canvas_Video_Data *sd;
586
   int w, h;
587

588
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
589

590
   sd->aspect = EMOTION_ASPECT_CUSTOM;
591
   sd->crop.l = -l;
592
   sd->crop.r = -r;
593
   sd->crop.t = -t;
594
   sd->crop.b = -b;
595
   evas_object_geometry_get(obj, NULL, NULL, &w, &h);
596
   _efl_canvas_video_aspect_border_apply(obj, sd, w, h);
597
}
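
/* A minimal sketch of the border semantics (illustrative only): positive
 * values add a border on that edge (showing the bg color set below), while
 * negative values crop into the video instead.  "em" stands for any
 * emotion object created with emotion_object_add():
 *
 *   emotion_object_bg_color_set(em, 0, 0, 0, 255); // black borders
 *   emotion_object_border_set(em, 10, 10, 10, 10); // border on every edge
 *   emotion_object_border_set(em, -40, -40, 0, 0); // crop left/right edges
 */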
598

599
EMOTION_API void
600
emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b)
601
{
602
   Efl_Canvas_Video_Data *sd;
603

604
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
605
   *l = -sd->crop.l;
606
   *r = -sd->crop.r;
607
   *t = -sd->crop.t;
608
   *b = -sd->crop.b;
609
}
610

611
EMOTION_API void
612
emotion_object_bg_color_set(Evas_Object *obj, int r, int g, int b, int a)
613
{
614
   Efl_Canvas_Video_Data *sd;
615

616
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
617
   evas_object_color_set(sd->bg, r, g, b, a);
618
}
619

620
EMOTION_API void
621
emotion_object_bg_color_get(const Evas_Object *obj, int *r, int *g, int *b, int *a)
622
{
623
   Efl_Canvas_Video_Data *sd;
624

625
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
626
   evas_object_color_get(sd->bg, r, g, b, a);
627
}
628

629
EMOTION_API void
630
emotion_object_keep_aspect_set(Evas_Object *obj, Emotion_Aspect a)
631
{
632
   Efl_Canvas_Video_Data *sd;
633
   int w, h;
634

635
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
636
   if (a == sd->aspect) return;
637

638
   sd->aspect = a;
639
   evas_object_geometry_get(obj, NULL, NULL, &w, &h);
640
   _efl_canvas_video_aspect_border_apply(obj, sd, w, h);
641
}
642

643
EMOTION_API Emotion_Aspect
644
emotion_object_keep_aspect_get(const Evas_Object *obj)
645
{
646
   Efl_Canvas_Video_Data *sd;
647

648
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EMOTION_ASPECT_KEEP_NONE);
649
   return sd->aspect;
650
}
651

652
EMOTION_API void
653
emotion_object_play_set(Evas_Object *obj, Eina_Bool play)
654
{
655
   /* avoid calling playback_position_set(0) for legacy */
656
   if (play)
657
     efl_player_playing_set(obj, EINA_TRUE);
658
   efl_player_paused_set(obj, !play);
659
}
660

661
EOLIAN static Eina_Bool
662
_efl_canvas_video_efl_player_playing_set(Eo *obj, Efl_Canvas_Video_Data *sd, Eina_Bool play)
663
{
664
   play = !!play;
665
   DBG("play=" FMT_UCHAR ", was=" FMT_UCHAR, play, sd->play);
666
   if (!sd->engine_instance) return EINA_FALSE;
667
   /* always unset pause if playing is false */
668
   if (!play) sd->pause = EINA_FALSE;
669
   if (!sd->open)
670
     {
671
        sd->remember_play = play;
672
        return EINA_TRUE;
673
     }
674
   if (play == sd->play) return EINA_TRUE;
675
   sd->play = play;
676
   sd->remember_play = play;
677
   if (sd->state != EMOTION_WAKEUP) emotion_object_suspend_set(obj, EMOTION_WAKEUP);
678
   if (sd->play) emotion_engine_instance_play(sd->engine_instance, 0.0);
679
   else
680
     {
681
        emotion_engine_instance_stop(sd->engine_instance);
682
        efl_player_playback_position_set(obj, 0.0);
683
     }
684
   return EINA_TRUE;
685
}
686

687
EOLIAN static Eina_Bool
688
_efl_canvas_video_efl_player_paused_set(Eo *obj, Efl_Canvas_Video_Data *sd, Eina_Bool paused)
689
{
690
   paused = !!paused;
691
   DBG("paused=" FMT_UCHAR ", was=" FMT_UCHAR, paused, sd->pause);
692
   if (!sd->engine_instance) return EINA_FALSE;
693
   if (!sd->open)
694
     {
695
        /* queue pause */
696
        if (sd->remember_play)
697
          sd->pause = paused;
698
        return sd->remember_play;
699
     }
700
   if (!sd->play) return EINA_FALSE;
701
   if (paused == sd->pause) return EINA_TRUE;
702
   sd->pause = paused;
703
   if (sd->pause)
704
     emotion_engine_instance_stop(sd->engine_instance);
705
   else
706
     {
707
        if (sd->state != EMOTION_WAKEUP) emotion_object_suspend_set(obj, EMOTION_WAKEUP);
708
        emotion_engine_instance_play(sd->engine_instance, sd->pos);
709
     }
710
   return EINA_TRUE;
711
}
712

713
EMOTION_API Eina_Bool
714
emotion_object_play_get(const Evas_Object *obj)
715
{
716
   return efl_player_playing_get(obj) && !efl_player_paused_get(obj);
717
}
718

719
EOLIAN static Eina_Bool
720
_efl_canvas_video_efl_player_playing_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
721
{
722
   if (!sd->engine_instance) return EINA_FALSE;
723
   return sd->play;
724
}
725

726
EOLIAN static Eina_Bool
727
_efl_canvas_video_efl_player_paused_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
728
{
729
   if (!sd->engine_instance) return EINA_FALSE;
730
   if (!sd->play) return EINA_FALSE;
731
   return sd->pause;
732
}
733

734
EMOTION_API void
735
emotion_object_position_set(Evas_Object *obj, double sec)
736
{
737
   efl_player_playback_position_set(obj, sec);
738
}
739

740
EOLIAN static void
741
_efl_canvas_video_efl_player_playback_position_set(Eo *obj, Efl_Canvas_Video_Data *sd, double sec)
742
{
743
   DBG("sec=%f", sec);
744
   if (!sd->engine_instance) return;
745
   if (sec < 0.0) sec = 0.0;
746
   if (!sd->open)
747
     {
748
        sd->remember_jump = sec;
749
        return;
750
     }
751
   sd->remember_jump = 0;
752
   sd->seek_pos = sec;
753
   sd->seek = 1;
754
   sd->pos = sd->seek_pos;
755
   if (sd->job) ecore_job_del(sd->job);
756
   sd->job = ecore_job_add(_pos_set_job, obj);
757
}
758

759
EMOTION_API double
760
emotion_object_position_get(const Evas_Object *obj)
761
{
762
   return efl_player_playback_position_get(obj);
763
}
764

765
EOLIAN static double
766
_efl_canvas_video_efl_player_playback_position_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
767
{
768
   if (!sd->engine_instance) return 0.0;
769
   sd->pos = emotion_engine_instance_pos_get(sd->engine_instance);
770
   return sd->pos;
771
}
772

773
EMOTION_API double
774
emotion_object_buffer_size_get(const Evas_Object *obj)
775
{
776
   Efl_Canvas_Video_Data *sd;
777

778
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 1.0);
779
   if (!sd->engine_instance) return 0.0;
780
   return emotion_engine_instance_buffer_size_get(sd->engine_instance);
781
}
782

783
EMOTION_API Eina_Bool
784
emotion_object_seekable_get(const Evas_Object *obj)
785
{
786
   return efl_playable_seekable_get(obj);
787
}
788

789
EMOTION_API Eina_Bool
790
emotion_object_video_handled_get(const Evas_Object *obj)
791
{
792
   Efl_Canvas_Video_Data *sd;
793

794
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
795
   if (!sd->engine_instance) return EINA_FALSE;
796
   return emotion_engine_instance_video_handled(sd->engine_instance);
797
}
798

799
EMOTION_API Eina_Bool
800
emotion_object_audio_handled_get(const Evas_Object *obj)
801
{
802
   Efl_Canvas_Video_Data *sd;
803

804
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
805
   if (!sd->engine_instance) return EINA_FALSE;
806
   return emotion_engine_instance_audio_handled(sd->engine_instance);
807
}
808

809
EMOTION_API double
810
emotion_object_play_length_get(const Evas_Object *obj)
811
{
812
   return efl_playable_length_get(obj);
813
}
814

815
EMOTION_API void
816
emotion_object_size_get(const Evas_Object *obj, int *iw, int *ih)
817
{
818
   Eina_Size2D sz;
819

820
   sz = efl_gfx_image_load_controller_load_size_get(obj);
821
   if (iw) *iw = sz.w;
822
   if (ih) *ih = sz.h;
823
}
824

825
EOLIAN static Eina_Size2D
826
_efl_canvas_video_efl_gfx_image_load_controller_load_size_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
827
{
828
   // FIXME: Shouldn't this be efl_gfx_view_size instead?
829
   return EINA_SIZE2D(sd->video.w, sd->video.h);
830
}
831

832
EMOTION_API void
833
emotion_object_smooth_scale_set(Evas_Object *obj, Eina_Bool smooth)
834
{
835
   efl_gfx_image_smooth_scale_set(obj, smooth);
836
}
837

838
EOLIAN static void
839
_efl_canvas_video_efl_gfx_image_smooth_scale_set(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd, Eina_Bool smooth)
840
{
841
   evas_object_image_smooth_scale_set(sd->obj, smooth);
842
}
843

844
EMOTION_API Eina_Bool
845
emotion_object_smooth_scale_get(const Evas_Object *obj)
846
{
847
   return efl_gfx_image_smooth_scale_get(obj);
848
}
849

850
EOLIAN static Eina_Bool
851
_efl_canvas_video_efl_gfx_image_smooth_scale_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
852
{
853
   return evas_object_image_smooth_scale_get(sd->obj);
854
}
855

856
EMOTION_API double
857
emotion_object_ratio_get(const Evas_Object *obj)
858
{
859
   return efl_gfx_image_ratio_get(obj);
860
}
861

862
EOLIAN static double
863
_efl_canvas_video_efl_gfx_image_ratio_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
864
{
865
   if (!sd->engine_instance) return 0.0;
866
   return sd->ratio;
867
}
868

869
/*
870
 * Send a control event to the DVD.
871
 */
872
EMOTION_API void
873
emotion_object_event_simple_send(Evas_Object *obj, Emotion_Event ev)
874
{
875
   Efl_Canvas_Video_Data *sd;
876

877
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
878
   if (!sd->engine_instance) return;
879
   emotion_engine_instance_event_feed(sd->engine_instance, ev);
880
}
881

882
EMOTION_API void
883
emotion_object_audio_volume_set(Evas_Object *obj, double vol)
884
{
885
   efl_audio_control_volume_set(obj, vol);
886
}
887

888
EOLIAN static void
889
_efl_canvas_video_efl_audio_control_volume_set(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd, double vol)
890
{
891
   DBG("vol=%f", vol);
892
   if (!sd->engine_instance) return;
893
   emotion_engine_instance_audio_channel_volume_set(sd->engine_instance, vol);
894
}
895

896
EMOTION_API double
897
emotion_object_audio_volume_get(const Evas_Object *obj)
898
{
899
   return efl_audio_control_volume_get(obj);
900
}
901

902
EOLIAN static double
903
_efl_canvas_video_efl_audio_control_volume_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
904
{
905
   if (!sd->engine_instance) return 0.0;
906
   return emotion_engine_instance_audio_channel_volume_get(sd->engine_instance);
907
}
908

909
EMOTION_API void
910
emotion_object_audio_mute_set(Evas_Object *obj, Eina_Bool mute)
911
{
912
   efl_audio_control_mute_set(obj, mute);
913
}
914

915
EOLIAN static void
916
_efl_canvas_video_efl_audio_control_mute_set(Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd, Eina_Bool mute)
917
{
918
   DBG("mute=" FMT_UCHAR, mute);
919
   if (!sd->engine_instance) return;
920
   emotion_engine_instance_audio_channel_mute_set(sd->engine_instance, mute);
921
}
922

923
EMOTION_API Eina_Bool
924
emotion_object_audio_mute_get(const Evas_Object *obj)
925
{
926
   return efl_audio_control_mute_get(obj);
927
}
928

929
EOLIAN static Eina_Bool
930
_efl_canvas_video_efl_audio_control_mute_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
931
{
932
   if (!sd->engine_instance) return EINA_FALSE;
933
   return emotion_engine_instance_audio_channel_mute_get(sd->engine_instance);
934
}
935

936
EMOTION_API int
937
emotion_object_audio_channel_count(const Evas_Object *obj)
938
{
939
   Efl_Canvas_Video_Data *sd;
940

941
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
942
   if (!sd->engine_instance) return 0;
943
   return emotion_engine_instance_audio_channel_count(sd->engine_instance);
944
}
945

946
EMOTION_API const char *
947
emotion_object_audio_channel_name_get(const Evas_Object *obj, int channel)
948
{
949
   Efl_Canvas_Video_Data *sd;
950

951
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
952
   if (!sd->engine_instance) return NULL;
953
   return emotion_engine_instance_audio_channel_name_get(sd->engine_instance, channel);
954
}
955

956
EMOTION_API void
957
emotion_object_audio_channel_set(Evas_Object *obj, int channel)
958
{
959
   Efl_Canvas_Video_Data *sd;
960

961
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
962
   DBG("channel=%d", channel);
963
   if (!sd->engine_instance) return;
964
   emotion_engine_instance_audio_channel_set(sd->engine_instance, channel);
965
}
966

967
EMOTION_API int
968
emotion_object_audio_channel_get(const Evas_Object *obj)
969
{
970
   Efl_Canvas_Video_Data *sd;
971

972
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
973
   if (!sd->engine_instance) return 0;
974
   return emotion_engine_instance_audio_channel_get(sd->engine_instance);
975
}
976

977
EMOTION_API void
978
emotion_object_video_mute_set(Evas_Object *obj, Eina_Bool mute)
979
{
980
   Efl_Canvas_Video_Data *sd;
981

982
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
983
   DBG("mute=" FMT_UCHAR, mute);
984
   if (!sd->engine_instance) return;
985
   emotion_engine_instance_video_channel_mute_set(sd->engine_instance, mute);
986
}
987

988
EMOTION_API Eina_Bool
989
emotion_object_video_mute_get(const Evas_Object *obj)
990
{
991
   Efl_Canvas_Video_Data *sd;
992

993
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
994
   if (!sd->engine_instance) return EINA_FALSE;
995
   return emotion_engine_instance_video_channel_mute_get(sd->engine_instance);
996
}
997

998
EMOTION_API void
999
emotion_object_video_subtitle_file_set(Evas_Object *obj, const char *filepath)
1000
{
1001
   Efl_Canvas_Video_Data *sd;
1002

1003
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1004
   DBG("subtitle=%s", filepath);
1005
   if (!sd->engine_instance) _engine_init(obj, sd);
1006
   if (!sd->engine_instance) return;
1007
   emotion_engine_instance_video_subtitle_file_set(sd->engine_instance, filepath);
1008
}
1009

1010
EMOTION_API const char *
1011
emotion_object_video_subtitle_file_get(const Evas_Object *obj)
1012
{
1013
   Efl_Canvas_Video_Data *sd;
1014

1015
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1016
   if (!sd->engine_instance) return NULL;
1017
   return emotion_engine_instance_video_subtitle_file_get(sd->engine_instance);
1018
}
1019

1020
EMOTION_API int
1021
emotion_object_video_channel_count(const Evas_Object *obj)
1022
{
1023
   Efl_Canvas_Video_Data *sd;
1024

1025
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1026
   if (!sd->engine_instance) return 0;
1027
   return emotion_engine_instance_video_channel_count(sd->engine_instance);
1028
}
1029

1030
EMOTION_API const char *
1031
emotion_object_video_channel_name_get(const Evas_Object *obj, int channel)
1032
{
1033
   Efl_Canvas_Video_Data *sd;
1034

1035
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1036
   if (!sd->engine_instance) return NULL;
1037
   return emotion_engine_instance_video_channel_name_get(sd->engine_instance, channel);
1038
}
1039

1040
EMOTION_API void
1041
emotion_object_video_channel_set(Evas_Object *obj, int channel)
1042
{
1043
   Efl_Canvas_Video_Data *sd;
1044

1045
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1046
   DBG("channel=%d", channel);
1047
   if (!sd->engine_instance) return;
1048
   emotion_engine_instance_video_channel_set(sd->engine_instance, channel);
1049
}
1050

1051
EMOTION_API int
1052
emotion_object_video_channel_get(const Evas_Object *obj)
1053
{
1054
   Efl_Canvas_Video_Data *sd;
1055

1056
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1057
   if (!sd->engine_instance) return 0;
1058
   return emotion_engine_instance_video_channel_get(sd->engine_instance);
1059
}
1060

1061
EMOTION_API void
1062
emotion_object_spu_mute_set(Evas_Object *obj, Eina_Bool mute)
1063
{
1064
   Efl_Canvas_Video_Data *sd;
1065

1066
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1067
   DBG("mute=" FMT_UCHAR, mute);
1068
   if (!sd->engine_instance) return;
1069
   emotion_engine_instance_spu_channel_mute_set(sd->engine_instance, mute);
1070
}
1071

1072
EMOTION_API Eina_Bool
1073
emotion_object_spu_mute_get(const Evas_Object *obj)
1074
{
1075
   Efl_Canvas_Video_Data *sd;
1076

1077
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1078
   if (!sd->engine_instance) return EINA_FALSE;
1079
   return emotion_engine_instance_spu_channel_mute_get(sd->engine_instance);
1080
}
1081

1082
EMOTION_API int
1083
emotion_object_spu_channel_count(const Evas_Object *obj)
1084
{
1085
   Efl_Canvas_Video_Data *sd;
1086

1087
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1088
   if (!sd->engine_instance) return 0;
1089
   return emotion_engine_instance_spu_channel_count(sd->engine_instance);
1090
}
1091

1092
EMOTION_API const char *
1093
emotion_object_spu_channel_name_get(const Evas_Object *obj, int channel)
1094
{
1095
   Efl_Canvas_Video_Data *sd;
1096

1097
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1098
   if (!sd->engine_instance) return NULL;
1099
   return emotion_engine_instance_spu_channel_name_get(sd->engine_instance, channel);
1100
}
1101

1102
EMOTION_API void
1103
emotion_object_spu_channel_set(Evas_Object *obj, int channel)
1104
{
1105
   Efl_Canvas_Video_Data *sd;
1106

1107
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1108
   DBG("channel=%d", channel);
1109
   if (!sd->engine_instance) return;
1110
   emotion_engine_instance_spu_channel_set(sd->engine_instance, channel);
1111
}
1112

1113
EMOTION_API int
1114
emotion_object_spu_channel_get(const Evas_Object *obj)
1115
{
1116
   Efl_Canvas_Video_Data *sd;
1117

1118
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1119
   if (!sd->engine_instance) return 0;
1120
   return emotion_engine_instance_spu_channel_get(sd->engine_instance);
1121
}
1122

1123
EMOTION_API int
1124
emotion_object_chapter_count(const Evas_Object *obj)
1125
{
1126
   Efl_Canvas_Video_Data *sd;
1127

1128
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1129
   if (!sd->engine_instance) return 0;
1130
   return emotion_engine_instance_chapter_count(sd->engine_instance);
1131
}
1132

1133
EMOTION_API void
1134
emotion_object_chapter_set(Evas_Object *obj, int chapter)
1135
{
1136
   Efl_Canvas_Video_Data *sd;
1137

1138
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1139
   DBG("chapter=%d", chapter);
1140
   if (!sd->engine_instance) return;
1141
   emotion_engine_instance_chapter_set(sd->engine_instance, chapter);
1142
}
1143

1144
EMOTION_API int
1145
emotion_object_chapter_get(const Evas_Object *obj)
1146
{
1147
   Efl_Canvas_Video_Data *sd;
1148

1149
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1150
   if (!sd->engine_instance) return 0;
1151
   return emotion_engine_instance_chapter_get(sd->engine_instance);
1152
}
1153

1154
EMOTION_API const char *
1155
emotion_object_chapter_name_get(const Evas_Object *obj, int chapter)
1156
{
1157
   Efl_Canvas_Video_Data *sd;
1158

1159
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1160
   if (!sd->engine_instance) return NULL;
1161
   return emotion_engine_instance_chapter_name_get(sd->engine_instance, chapter);
1162
}
1163

1164
EMOTION_API void
1165
emotion_object_play_speed_set(Evas_Object *obj, double speed)
1166
{
1167
   Efl_Canvas_Video_Data *sd;
1168

1169
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1170
   DBG("speed=%f", speed);
1171
   if (!sd->engine_instance) return;
1172
   emotion_engine_instance_speed_set(sd->engine_instance, speed);
1173
}
1174

1175
EMOTION_API double
1176
emotion_object_play_speed_get(const Evas_Object *obj)
1177
{
1178
   Efl_Canvas_Video_Data *sd;
1179

1180
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0.0);
1181
   if (!sd->engine_instance) return 0.0;
1182
   return emotion_engine_instance_speed_get(sd->engine_instance);
1183
}
1184

1185
EMOTION_API void
1186
emotion_object_eject(Evas_Object *obj)
1187
{
1188
   Efl_Canvas_Video_Data *sd;
1189

1190
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1191
   if (!sd->engine_instance) return;
1192
   emotion_engine_instance_eject(sd->engine_instance);
1193
}
1194

1195
EMOTION_API const char *
1196
emotion_object_title_get(const Evas_Object *obj)
1197
{
1198
   Efl_Canvas_Video_Data *sd;
1199

1200
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1201
   return sd->title;
1202
}
1203

1204
EMOTION_API const char *
1205
emotion_object_progress_info_get(const Evas_Object *obj)
1206
{
1207
   Efl_Canvas_Video_Data *sd;
1208

1209
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1210
   return sd->progress.info;
1211
}
1212

1213
EMOTION_API double
1214
emotion_object_progress_status_get(const Evas_Object *obj)
1215
{
1216
   return efl_player_playback_progress_get(obj);
1217
}
1218

1219
EOLIAN static double
1220
_efl_canvas_video_efl_player_playback_progress_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
1221
{
1222
   return sd->progress.stat;
1223
}
1224

1225
EOLIAN static void
1226
_efl_canvas_video_efl_player_playback_progress_set(Eo *obj, Efl_Canvas_Video_Data *sd EINA_UNUSED, double progress)
1227
{
1228
   const char *info = emotion_object_progress_info_get((const Evas_Object*)obj);
1229
   _emotion_progress_set(obj, (char*)info, progress);
1230
}
1231

1232
EOLIAN static double
1233
_efl_canvas_video_efl_playable_length_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
1234
{
1235
   if (!sd->engine_instance) return 0.0;
1236
   sd->len = emotion_engine_instance_len_get(sd->engine_instance);
1237
   return sd->len;
1238
}
1239

1240
EOLIAN static Eina_Bool
1241
_efl_canvas_video_efl_playable_seekable_get(const Eo *obj EINA_UNUSED, Efl_Canvas_Video_Data *sd)
1242
{
1243
   if (!sd->engine_instance) return EINA_FALSE;
1244
   return emotion_engine_instance_seekable(sd->engine_instance);
1245
}
1246

1247
EMOTION_API const char *
1248
emotion_object_ref_file_get(const Evas_Object *obj)
1249
{
1250
   Efl_Canvas_Video_Data *sd;
1251

1252
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1253
   return sd->ref.file;
1254
}
1255

1256
EMOTION_API int
1257
emotion_object_ref_num_get(const Evas_Object *obj)
1258
{
1259
   Efl_Canvas_Video_Data *sd;
1260

1261
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1262
   return sd->ref.num;
1263
}
1264

1265
EMOTION_API int
1266
emotion_object_spu_button_count_get(const Evas_Object *obj)
1267
{
1268
   Efl_Canvas_Video_Data *sd;
1269

1270
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1271
   return sd->spu.button_num;
1272
}
1273

1274
EMOTION_API int
1275
emotion_object_spu_button_get(const Evas_Object *obj)
1276
{
1277
   Efl_Canvas_Video_Data *sd;
1278

1279
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
1280
   return sd->spu.button;
1281
}
1282

1283
EMOTION_API const char *
1284
emotion_object_meta_info_get(const Evas_Object *obj, Emotion_Meta_Info meta)
1285
{
1286
   Efl_Canvas_Video_Data *sd;
1287
   int id;
1288

1289
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1290
   if (!sd->engine_instance) return NULL;
1291
   switch (meta)
1292
     {
1293
      case EMOTION_META_INFO_TRACK_TITLE:
1294
         id = META_TRACK_TITLE;
1295
         break;
1296
      case EMOTION_META_INFO_TRACK_ARTIST:
1297
         id = META_TRACK_ARTIST;
1298
         break;
1299
      case EMOTION_META_INFO_TRACK_ALBUM:
1300
         id = META_TRACK_ALBUM;
1301
         break;
1302
      case EMOTION_META_INFO_TRACK_YEAR:
1303
         id = META_TRACK_YEAR;
1304
         break;
1305
      case EMOTION_META_INFO_TRACK_GENRE:
1306
         id = META_TRACK_GENRE;
1307
         break;
1308
      case EMOTION_META_INFO_TRACK_COMMENT:
1309
         id = META_TRACK_COMMENT;
1310
         break;
1311
      case EMOTION_META_INFO_TRACK_DISC_ID:
1312
         id = META_TRACK_DISCID;
1313
        break;
1314
      default:
1315
         ERR("Unknown meta info id: %d", meta);
1316
         return NULL;
1317
     }
1318

1319
   return emotion_engine_instance_meta_get(sd->engine_instance, id);
1320
}
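
/* e.g. (illustrative; "em" is any emotion object with an opened file):
 *
 *   const char *artist =
 *     emotion_object_meta_info_get(em, EMOTION_META_INFO_TRACK_ARTIST);
 *
 * hands back the matching tag from the engine, or NULL when it is not
 * available.
 */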
1321

1322

1323
EMOTION_API Evas_Object *
1324
emotion_file_meta_artwork_get(const Evas_Object *obj, const char *path, Emotion_Artwork_Info type)
1325
{
1326
   Efl_Canvas_Video_Data *sd;
1327
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1328
   if (!sd->engine_instance) return NULL;
1329

1330
   Evas *ev = evas_object_evas_get(obj);
1331
   Evas_Object *artwork = evas_object_image_add(ev);
1332

1333
   Evas_Object *result = emotion_engine_instance_meta_artwork_get(sd->engine_instance, artwork, path, type);
1334
   if (!result)
     {
        evas_object_del(artwork); /* drop the unused image object on failure */
        return NULL;
     }
1335

1336
   Evas_Load_Error _error = evas_object_image_load_error_get(result);
1337
   if (_error != EVAS_LOAD_ERROR_NONE) return NULL;
1338

1339
   return result;
1340
}
1341

1342
EMOTION_API void
1343
emotion_object_vis_set(Evas_Object *obj, Emotion_Vis visualization)
1344
{
1345
   Efl_Canvas_Video_Data *sd;
1346

1347
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1348
   DBG("visualization=%d", visualization);
1349
   if (!sd->engine_instance) return;
1350
   emotion_engine_instance_vis_set(sd->engine_instance, visualization);
1351
}
1352

1353
EMOTION_API Emotion_Vis
1354
emotion_object_vis_get(const Evas_Object *obj)
1355
{
1356
   Efl_Canvas_Video_Data *sd;
1357

1358
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EMOTION_VIS_NONE);
1359
   if (!sd->engine_instance) return EMOTION_VIS_NONE;
1360
   return emotion_engine_instance_vis_get(sd->engine_instance);
1361
}
1362

1363
EMOTION_API Eina_Bool
1364
emotion_object_vis_supported(const Evas_Object *obj, Emotion_Vis visualization)
1365
{
1366
   Efl_Canvas_Video_Data *sd;
1367

1368
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EINA_FALSE);
1369
   if (!sd->engine_instance) return EINA_FALSE;
1370
   return emotion_engine_instance_vis_supported(sd->engine_instance, visualization);
1371
}
1372

1373
EMOTION_API void
1374
emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority)
1375
{
1376
   Efl_Canvas_Video_Data *sd;
1377

1378
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1379
   if (!sd->engine_instance) return;
1380
   emotion_engine_instance_priority_set(sd->engine_instance, priority);
1381
}
1382

1383
EMOTION_API Eina_Bool
1384
emotion_object_priority_get(const Evas_Object *obj)
1385
{
1386
   Efl_Canvas_Video_Data *sd;
1387

1388
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EINA_FALSE);
1389
   if (!sd->engine_instance) return EINA_FALSE;
1390
   return emotion_engine_instance_priority_get(sd->engine_instance);
1391
}
1392

1393
#ifdef HAVE_EIO
1394
static void
1395
_eio_load_xattr_cleanup(Emotion_Xattr_Data *xattr, Eio_File *handler)
1396
{
1397
   if (handler == xattr->load) xattr->load = NULL;
1398
   _xattr_data_unref(xattr);
1399
}
1400

1401
static void
1402
_eio_load_xattr_done(void *data, Eio_File *handler, double xattr_double)
1403
{
1404
   Emotion_Xattr_Data *xattr = data;
1405

1406
   emotion_object_position_set(evas_object_smart_parent_get(xattr->obj_wref), xattr_double);
1407
   efl_event_callback_call(evas_object_smart_parent_get(xattr->obj_wref), EFL_CANVAS_VIDEO_EVENT_POSITION_LOAD_DONE, NULL);
1408
   evas_object_smart_callback_call(evas_object_smart_parent_get(xattr->obj_wref), "position_load,succeed", NULL);
1409
   _eio_load_xattr_cleanup(xattr, handler);
1410
}
1411

1412
static void
1413
_eio_load_xattr_error(void *data, Eio_File *handler, int err EINA_UNUSED)
1414
{
1415
   Emotion_Xattr_Data *xattr = data;
1416

1417
   efl_event_callback_call(evas_object_smart_parent_get(xattr->obj_wref), EFL_CANVAS_VIDEO_EVENT_POSITION_LOAD_FAIL, NULL);
1418
   evas_object_smart_callback_call(evas_object_smart_parent_get(xattr->obj_wref), "position_load,failed", NULL);
1419
   _eio_load_xattr_cleanup(xattr, handler);
1420
}
1421
#endif
1422

1423
EMOTION_API void
1424
emotion_object_last_position_load(Evas_Object *obj)
1425
{
1426
   Efl_Canvas_Video_Data *sd;
1427
   const char *tmp;
1428
#ifndef HAVE_EIO
1429
   double xattr;
1430
#endif
1431

1432
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1433
   if (!sd->file) return;
1434

1435
   if (!strncmp(sd->file, "file://", 7)) tmp = sd->file + 7;
1436
   else if (!strstr(sd->file, "://")) tmp = sd->file;
1437
   else return;
1438

1439
#ifdef HAVE_EIO
1440
   Emotion_Xattr_Data *xattr = sd->xattr;
1441

1442
   if (xattr->load) return;
1443
   EINA_REFCOUNT_REF(xattr);
1444

1445
   xattr->load = eio_file_xattr_double_get(tmp,
1446
                                           "user.e.time_seek",
1447
                                           _eio_load_xattr_done,
1448
                                           _eio_load_xattr_error,
1449
                                           xattr);
1450
#else
1451
   if (eina_xattr_double_get(tmp, "user.e.time_seek", &xattr))
1452
     {
1453
        emotion_object_position_set(obj, xattr);
1454
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_POSITION_LOAD_DONE, NULL);
1455
        evas_object_smart_callback_call(obj, "position_load,succeed", NULL);
1456
     }
1457
   else
1458
     {
1459
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_POSITION_LOAD_FAIL, NULL);
1460
        evas_object_smart_callback_call(obj, "position_load,failed", NULL);
1461
     }
1462
#endif
1463
}
1464

1465
#ifdef HAVE_EIO
1466
static void
1467
_eio_save_xattr_cleanup(Emotion_Xattr_Data *xattr, Eio_File *handler)
1468
{
1469
   if (handler == xattr->save) xattr->save = NULL;
1470
   _xattr_data_unref(xattr);
1471
}
1472

1473
static void
1474
_eio_save_xattr_done(void *data, Eio_File *handler)
1475
{
1476
   Emotion_Xattr_Data *xattr = data;
1477

1478
   efl_event_callback_call(xattr->obj_wref, EFL_CANVAS_VIDEO_EVENT_POSITION_SAVE_DONE, NULL);
1479
   evas_object_smart_callback_call(xattr->obj_wref, "position_save,succeed", NULL);
1480
   _eio_save_xattr_cleanup(xattr, handler);
1481
}
1482

1483
static void
1484
_eio_save_xattr_error(void *data, Eio_File *handler, int err EINA_UNUSED)
1485
{
1486
   Emotion_Xattr_Data *xattr = data;
1487

1488
   efl_event_callback_call(xattr->obj_wref, EFL_CANVAS_VIDEO_EVENT_POSITION_SAVE_FAIL, NULL);
1489
   evas_object_smart_callback_call(xattr->obj_wref, "position_save,failed", NULL);
1490
   _eio_save_xattr_cleanup(xattr, handler);
1491
}
1492
#endif
1493

1494
EMOTION_API void
1495
emotion_object_last_position_save(Evas_Object *obj)
1496
{
1497
   Efl_Canvas_Video_Data *sd;
1498
   const char *tmp;
1499

1500
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1501
   if (!sd->file) return;
1502

1503
   if (!strncmp(sd->file, "file://", 7)) tmp = sd->file + 7;
1504
   else if (!strstr(sd->file, "://")) tmp = sd->file;
1505
   else return;
1506
#ifdef HAVE_EIO
1507
   Emotion_Xattr_Data *xattr = sd->xattr;
1508

1509
   if (xattr->save) return;
1510
   EINA_REFCOUNT_REF(xattr);
1511

1512
   xattr->save = eio_file_xattr_double_set(tmp,
1513
                                           "user.e.time_seek",
1514
                                           emotion_object_position_get(obj),
1515
                                           0,
1516
                                           _eio_save_xattr_done,
1517
                                           _eio_save_xattr_error,
1518
                                           xattr);
1519
#else
1520
   if (eina_xattr_double_set(tmp, "user.e.time_seek", emotion_object_position_get(obj), 0))
1521
     {
1522
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_POSITION_SAVE_DONE, NULL);
1523
        evas_object_smart_callback_call(obj, "position_save,succeed", NULL);
1524
     }
1525
   else
1526
     {
1527
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_POSITION_SAVE_FAIL, NULL);
1528
        evas_object_smart_callback_call(obj, "position_save,failed", NULL);
1529
     }
1530
#endif
1531
}
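
/* A minimal sketch of how an application might pair these calls
 * (illustrative only): restore the last position right after setting the
 * file and store it again before tearing the object down:
 *
 *   emotion_object_file_set(em, path);
 *   emotion_object_last_position_load(em);  // async when EIO is available
 *   // ... playback ...
 *   emotion_object_last_position_save(em);  // before deleting "em"
 *
 * Completion is reported through the "position_load,succeed"/"failed" and
 * "position_save,succeed"/"failed" smart callbacks emitted above.
 */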
1532

1533
EMOTION_API void
1534
emotion_object_suspend_set(Evas_Object *obj, Emotion_Suspend state)
1535
{
1536
   Efl_Canvas_Video_Data *sd;
1537

1538
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1539
   switch (state)
1540
     {
1541
      case EMOTION_WAKEUP:
1542
         /* Restore the rendering pipeline, offset and everything needed to play again (this is triggered automatically by play_set) */
1543
      case EMOTION_SLEEP:
1544
         /* This destroys part of the rendering pipeline */
1545
      case EMOTION_DEEP_SLEEP:
1546
         /* This destroys the whole rendering pipeline and keeps just the last rendered image (fullscreen) */
1547
      case EMOTION_HIBERNATE:
1548
         /* This destroys the whole rendering pipeline and keeps 1/4 of the last rendered image */
1549
      default:
1550
         break;
1551
     }
1552
   sd->state = state;
1553
}
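
/* A minimal sketch (illustrative only): a player could request a cheaper
 * state while its window is hidden and wake the object up when shown again:
 *
 *   emotion_object_suspend_set(em, EMOTION_SLEEP);   // window hidden
 *   // ...
 *   emotion_object_suspend_set(em, EMOTION_WAKEUP);  // window visible again
 *
 * Note that this setter only records the requested state; it can be read
 * back through emotion_object_suspend_get() below, and
 * emotion_object_play_set() switches back to EMOTION_WAKEUP automatically,
 * as the comments above describe.
 */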
1554

1555
EMOTION_API Emotion_Suspend
1556
emotion_object_suspend_get(Evas_Object *obj)
1557
{
1558
   Efl_Canvas_Video_Data *sd;
1559

1560
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EMOTION_WAKEUP);
1561
   return sd->state;
1562
}
1563

1564
/*****************************/
1565
/* Utility calls for modules */
1566
/*****************************/
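
/* A minimal sketch of how a backend module is expected to drive these
 * helpers (illustrative only; "my_decoder_frame_cb" is a hypothetical
 * engine callback name):
 *
 *   static void
 *   my_decoder_frame_cb(void *data)
 *   {
 *      Evas_Object *obj = data;  // the emotion object handed to the engine
 *
 *      _emotion_frame_new(obj);  // schedules the animator below, which marks
 *                                // the image dirty, updates position/length
 *                                // and emits "frame_decode"
 *   }
 */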
1567

1568
EMOTION_API void *
1569
_emotion_video_get(const Evas_Object *obj)
1570
{
1571
   Efl_Canvas_Video_Data *sd;
1572

1573
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, NULL);
1574
   return emotion_engine_instance_data_get(sd->engine_instance);
1575
}
1576

1577
static Eina_Bool
1578
_emotion_frame_anim(void *data)
1579
{
1580
   Evas_Object *obj = data;
1581
   Efl_Canvas_Video_Data *sd;
1582

1583
   E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, EINA_FALSE);
1584

1585
   sd->anim = NULL;
1586
   evas_object_image_pixels_dirty_set(sd->obj, 1);
1587
   _emotion_video_pos_update(obj,
1588
                             emotion_engine_instance_pos_get(sd->engine_instance),
1589
                             emotion_engine_instance_len_get(sd->engine_instance));
1590
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_FRAME_DECODE, NULL);
1591
   evas_object_smart_callback_call(obj, "frame_decode", NULL);
1592
   return EINA_FALSE;
1593
}
1594

1595
EMOTION_API void
1596
_emotion_frame_new(Evas_Object *obj)
1597
{
1598
   Efl_Canvas_Video_Data *sd;
1599

1600
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1601
   if (!sd->anim)
1602
     sd->anim = ecore_evas_animator_add(obj, _emotion_frame_anim, obj);
1603
}
1604

1605
EMOTION_API void
1606
_emotion_video_pos_update(Evas_Object *obj, double pos, double len)
1607
{
1608
   Efl_Canvas_Video_Data *sd;
1609
   int npos = 0, nlen = 0;
1610

1611
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1612
   if (!EINA_DBL_EQ(pos, sd->pos)) npos = 1;
1613
   if (!EINA_DBL_EQ(len, sd->len)) nlen = 1;
1614
   sd->pos = pos;
1615
   sd->len = len;
1616
   if (npos)
1617
     {
1618
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_POSITION_CHANGE, NULL);
1619
        evas_object_smart_callback_call(obj, "position_update", NULL);
1620
     }
1621
   if (nlen)
1622
     {
1623
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_LENGTH_CHANGE, NULL);
1624
        evas_object_smart_callback_call(obj, "length_change", NULL);
1625
     }
1626
}
1627

1628
EMOTION_API void
1629
_emotion_frame_resize(Evas_Object *obj, int w, int h, double ratio)
1630
{
1631
   Efl_Canvas_Video_Data *sd;
1632
   double tmp;
1633
   int changed = 0;
1634

1635
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1636
   if ((w != sd->video.w) || (h != sd->video.h))
1637
     {
1638
        sd->video.w = w;
1639
        sd->video.h = h;
1640
        _emotion_image_data_zero(sd->obj);
1641
        changed = 1;
1642
     }
1643
   if (h > 0) tmp  = (double)w / (double)h;
1644
   else tmp = 1.0;
1645
   if (!EINA_DBL_EQ(ratio, tmp)) tmp = ratio;
1646
   if (!EINA_DBL_EQ(tmp, sd->ratio))
1647
     {
1648
        sd->ratio = tmp;
1649
        changed = 1;
1650
     }
1651
   if (changed)
1652
     {
1653
        evas_object_size_hint_request_set(obj, w, h);
1654
        efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_FRAME_RESIZE, NULL);
1655
        evas_object_smart_callback_call(obj, "frame_resize", NULL);
1656
        evas_object_geometry_get(obj, NULL, NULL, &w, &h);
1657
        _efl_canvas_video_aspect_border_apply(obj, sd, w, h);
1658
     }
1659
}
1660

1661
EMOTION_API void
1662
_emotion_image_reset(Evas_Object *obj)
1663
{
1664
   Efl_Canvas_Video_Data *sd;
1665

1666
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1667
   _emotion_image_data_zero(sd->obj);
1668
}
1669

1670
EMOTION_API void
1671
_emotion_decode_stop(Evas_Object *obj)
1672
{
1673
   Efl_Canvas_Video_Data *sd;
1674

1675
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1676
   if (sd->play)
1677
     {
1678
        sd->play = 0;
1679
        evas_object_smart_callback_call(obj, "decode_stop", NULL);
1680
     }
1681
}
1682

1683
EMOTION_API void
1684
_emotion_open_done(Evas_Object *obj)
1685
{
1686
   Efl_Canvas_Video_Data *sd;
1687

1688
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1689
   sd->open = 1;
1690

1691
   if (!EINA_DBL_EQ(sd->remember_jump, 0.0))
1692
     emotion_object_position_set(obj, sd->remember_jump);
1693
   if (sd->remember_play != sd->play)
1694
     {
1695
        if (sd->pause)
1696
          sd->play = sd->remember_play;
1697
        else
1698
          emotion_object_play_set(obj, sd->remember_play);
1699
     }
1700
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_OPEN_DONE, NULL);
1701
   evas_object_smart_callback_call(obj, "open_done", NULL);
1702
}
1703

1704
EMOTION_API void
1705
_emotion_playback_started(Evas_Object *obj)
1706
{
1707
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_PLAYBACK_START, NULL);
1708
   evas_object_smart_callback_call(obj, "playback_started", NULL);
1709
}
1710

1711
EMOTION_API void
1712
_emotion_playback_finished(Evas_Object *obj)
1713
{
1714
   Efl_Canvas_Video_Data *sd;
1715

1716
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1717
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_PLAYBACK_STOP, NULL);
1718
   evas_object_smart_callback_call(obj, "playback_finished", NULL);
1719
}
1720

1721
EMOTION_API void
1722
_emotion_audio_level_change(Evas_Object *obj)
1723
{
1724
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_VOLUME_CHANGE, NULL);
1725
   evas_object_smart_callback_call(obj, "audio_level_change", NULL);
1726
}
1727

1728
EMOTION_API void
1729
_emotion_channels_change(Evas_Object *obj)
1730
{
1731
   Efl_Canvas_Video_Data *sd;
1732

1733
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1734
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_CHANNELS_CHANGE, NULL);
1735
   evas_object_smart_callback_call(obj, "channels_change", NULL);
1736
}
1737

1738
EMOTION_API void
1739
_emotion_title_set(Evas_Object *obj, char *title)
1740
{
1741
   Efl_Canvas_Video_Data *sd;
1742

1743
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1744
   eina_stringshare_replace(&sd->title, title);
1745
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_TITLE_CHANGE, NULL);
1746
   evas_object_smart_callback_call(obj, "title_change", NULL);
1747
}
1748

1749
EMOTION_API void
1750
_emotion_progress_set(Evas_Object *obj, char *info, double st)
1751
{
1752
   Efl_Canvas_Video_Data *sd;
1753

1754
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1755
   eina_stringshare_replace(&sd->progress.info, info);
1756
   sd->progress.stat = st;
1757
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_PROGRESS_CHANGE, NULL);
1758
   evas_object_smart_callback_call(obj, "progress_change", NULL);
1759
}
1760

1761
EMOTION_API void
1762
_emotion_file_ref_set(Evas_Object *obj, const char *file, int num)
1763
{
1764
   Efl_Canvas_Video_Data *sd;
1765

1766
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1767
   eina_stringshare_replace(&sd->ref.file, file);
1768
   sd->ref.num = num;
1769
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_REF_CHANGE, NULL);
1770
   evas_object_smart_callback_call(obj, "ref_change", NULL);
1771
}
1772

1773
EMOTION_API void
1774
_emotion_spu_button_num_set(Evas_Object *obj, int num)
1775
{
1776
   Efl_Canvas_Video_Data *sd;
1777

1778
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1779
   sd->spu.button_num = num;
1780
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_BUTTON_NUM_CHANGE, NULL);
1781
   evas_object_smart_callback_call(obj, "button_num_change", NULL);
1782
}
1783

1784
EMOTION_API void
1785
_emotion_spu_button_set(Evas_Object *obj, int button)
1786
{
1787
   Efl_Canvas_Video_Data *sd;
1788

1789
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
1790
   sd->spu.button = button;
1791
   efl_event_callback_call(obj, EFL_CANVAS_VIDEO_EVENT_BUTTON_CHANGE, NULL);
1792
   evas_object_smart_callback_call(obj, "button_change", NULL);
1793
}
1794

1795
EMOTION_API void
_emotion_seek_done(Evas_Object *obj)
{
   Efl_Canvas_Video_Data *sd;

   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
   if (sd->seeking)
     {
        sd->seeking = 0;
        if (sd->seek) emotion_object_position_set(obj, sd->seek_pos);
     }
}

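/* Called by the engine module when the frame fill ratio changes.  The
 * w/h values are fill scale factors relative to the object geometry; a
 * value of zero or less means "derive the fill from the video size
 * minus the crop borders", with fill.w/h kept at -1 as a sentinel. */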
EMOTION_API void
_emotion_frame_refill(Evas_Object *obj, double w, double h)
{
   Efl_Canvas_Video_Data *sd;

   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
   if ((!EINA_DBL_EQ(sd->fill.w, w)) ||
       (!EINA_DBL_EQ(sd->fill.h, h)))
     {
        Evas_Coord ow, oh;

        evas_object_geometry_get(obj, NULL, NULL, &ow, &oh);
        if ((w <= 0) || (h <= 0))
          {
             double scale_w, scale_h;

             sd->fill.w = -1;
             sd->fill.h = -1;

             scale_w = (double)ow / (double)(sd->video.w - sd->crop.l - sd->crop.r);
             scale_h = (double)oh / (double)(sd->video.h - sd->crop.t - sd->crop.b);
             evas_object_image_fill_set(sd->obj, 0, 0, scale_w * sd->video.w, scale_h * sd->video.h);
          }
        else
          {
             sd->fill.w = w;
             sd->fill.h = h;
             evas_object_image_fill_set(sd->obj, 0, 0, w * ow, h * oh);
          }
     }
}

/****************************/
/* Internal object routines */
/****************************/

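/* Mouse callbacks on the internal image object: translate canvas
 * coordinates into video pixel coordinates before feeding them to the
 * engine (used by backends that support interactive content such as
 * DVD/SPU menus). */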
static void
_mouse_move(void *data, Evas *ev EINA_UNUSED, Evas_Object *obj, void *event_info)
{
   Evas_Event_Mouse_Move *e;
   Efl_Canvas_Video_Data *sd;
   int x, y, iw, ih;
   Evas_Coord ox, oy, ow, oh;

   e = event_info;
   sd = data;
   if (!sd->engine_instance) return;
   evas_object_geometry_get(obj, &ox, &oy, &ow, &oh);
   evas_object_image_size_get(obj, &iw, &ih);
   if ((iw < 1) || (ih < 1)) return;
   x = (((int)e->cur.canvas.x - ox) * iw) / ow;
   y = (((int)e->cur.canvas.y - oy) * ih) / oh;
   emotion_engine_instance_event_mouse_move_feed(sd->engine_instance, x, y);
}

static void
_mouse_down(void *data, Evas *ev EINA_UNUSED, Evas_Object *obj, void *event_info)
{
   Evas_Event_Mouse_Down *e;
   Efl_Canvas_Video_Data *sd;
   int x, y, iw, ih;
   Evas_Coord ox, oy, ow, oh;

   e = event_info;
   sd = data;
   if (!sd->engine_instance) return;
   evas_object_geometry_get(obj, &ox, &oy, &ow, &oh);
   evas_object_image_size_get(obj, &iw, &ih);
   if ((iw < 1) || (ih < 1)) return;
   x = (((int)e->canvas.x - ox) * iw) / ow;
   y = (((int)e->canvas.y - oy) * ih) / oh;
   emotion_engine_instance_event_mouse_button_feed(sd->engine_instance, 1, x, y);
}

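/* Deferred seek: position changes only record the target and schedule
 * this job (presumably from emotion_object_position_set() earlier in
 * this file), so several requests within one mainloop iteration
 * collapse into a single engine seek.  While a seek is in flight
 * (sd->seeking) the request stays queued in sd->seek and is re-issued
 * from _emotion_seek_done(). */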
static void
_pos_set_job(void *data)
{
   Evas_Object *obj;
   Efl_Canvas_Video_Data *sd;

   obj = data;
   E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
   sd->job = NULL;
   if (!sd->engine_instance) return;
   if (sd->seeking) return;
   if (sd->seek)
     {
        sd->seeking = 1;
        emotion_engine_instance_pos_set(sd->engine_instance, sd->seek_pos);
        sd->seek = 0;
     }
}

/* called by evas when it needs pixels for the image object */
static void
_pixels_get(void *data, Evas_Object *obj)
{
   int iw, ih, w, h;
   Efl_Canvas_Video_Data *sd;
   Emotion_Format format;
   unsigned char *bgra_data;

   sd = data;
   if (!sd->engine_instance) return;
   emotion_engine_instance_video_data_size_get(sd->engine_instance, &w, &h);
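   /* Round the dimensions down to even values: the planar YUV formats
    * handled below carry chroma at half resolution, so odd sizes would
    * misalign the Cb/Cr rows. */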
   w = (w >> 1) << 1;
   h = (h >> 1) << 1;

   evas_object_image_colorspace_set(obj, EVAS_COLORSPACE_YCBCR422P601_PL);
   evas_object_image_alpha_set(obj, 0);
   evas_object_image_size_set(obj, w, h);
   iw = w;
   ih = h;

   if ((iw <= 1) || (ih <= 1))
     {
        _emotion_image_data_zero(sd->obj);
        evas_object_image_pixels_dirty_set(obj, 0);
     }
   else
     {
        format = emotion_engine_instance_format_get(sd->engine_instance);
        if ((format == EMOTION_FORMAT_YV12) || (format == EMOTION_FORMAT_I420))
          {
             unsigned char **rows;

             evas_object_image_colorspace_set(obj, EVAS_COLORSPACE_YCBCR422P601_PL);
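             /* For the planar colorspace Evas hands back an array of row
              * pointers: ih luma rows first, then the half-height Cb and
              * Cr planes, hence the chroma planes being addressed at
              * rows[ih] and rows[ih + ih / 2] below. */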
             rows = evas_object_image_data_get(obj, 1);
             if (rows)
               {
                  if (emotion_engine_instance_yuv_rows_get(sd->engine_instance, iw, ih,
                                                           rows,
                                                           &rows[ih],
                                                           &rows[ih + (ih / 2)]))
                    evas_object_image_data_update_add(obj, 0, 0, iw, ih);
               }
             evas_object_image_data_set(obj, rows);
             evas_object_image_pixels_dirty_set(obj, 0);
          }
        else if (format == EMOTION_FORMAT_BGRA)
          {
             evas_object_image_colorspace_set(obj, EVAS_COLORSPACE_ARGB8888);
             if (emotion_engine_instance_bgra_data_get(sd->engine_instance, &bgra_data))
               {
                  evas_object_image_data_set(obj, bgra_data);
                  evas_object_image_pixels_dirty_set(obj, 0);
               }
          }
     }
}

/*******************************************/
/* Internal smart object required routines */
/*******************************************/

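/* Smart "add": create the internal image object that receives the
 * decoded frames, the black background rectangle kept below it, and the
 * refcounted xattr helper data used by the Eio-based xattr load/save
 * paths. */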
EOLIAN static void
_efl_canvas_video_efl_canvas_group_group_add(Evas_Object *obj, Efl_Canvas_Video_Data *sd)
{
   Emotion_Xattr_Data *xattr;
   unsigned int *pixel;

   /* TODO: remove legacy: emotion used to have no init, call automatically */
   emotion_init();

   efl_canvas_group_add(efl_super(obj, MY_CLASS));

   sd->state = EMOTION_WAKEUP;
   sd->obj = evas_object_image_add(evas_object_evas_get(obj));
   sd->bg = evas_object_rectangle_add(evas_object_evas_get(obj));
   sd->engine = eina_stringshare_add("gstreamer1");
   evas_object_color_set(sd->bg, 0, 0, 0, 0);
   evas_object_event_callback_add(sd->obj, EVAS_CALLBACK_MOUSE_MOVE, _mouse_move, sd);
   evas_object_event_callback_add(sd->obj, EVAS_CALLBACK_MOUSE_DOWN, _mouse_down, sd);
   evas_object_image_pixels_get_callback_set(sd->obj, _pixels_get, sd);
   evas_object_smart_member_add(sd->obj, obj);
   evas_object_smart_member_add(sd->bg, obj);
   evas_object_lower(sd->bg);
   sd->ratio = 1.0;
   sd->spu.button = -1;
   sd->fill.w = -1;
   sd->fill.h = -1;
   evas_object_image_alpha_set(sd->obj, 0);
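   /* If the image already has a pixel buffer, seed it with a single
    * opaque black pixel so the object shows black rather than garbage
    * before the first decoded frame arrives. */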
   pixel = evas_object_image_data_get(sd->obj, 1);
   if (pixel)
     {
        *pixel = 0xff000000;
        /* hand the buffer back to the image object it was taken from */
        evas_object_image_data_set(sd->obj, pixel);
     }
   evas_object_show(sd->obj);
   evas_object_show(sd->bg);

   xattr = calloc(1, sizeof(*xattr));
   EINA_REFCOUNT_INIT(xattr);
   efl_wref_add(obj, &xattr->obj_wref);
   sd->xattr = xattr;
}

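/* Smart "del": tear down the engine instance first, then cancel any
 * pending job/animator and release the stringshared state before
 * handing over to the parent class. */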
EOLIAN static void
_efl_canvas_video_efl_canvas_group_group_del(Evas_Object *obj, Efl_Canvas_Video_Data *sd)
{
   if (sd->engine_instance)
     {
        emotion_engine_instance_file_close(sd->engine_instance);
        emotion_engine_instance_del(sd->engine_instance);
     }
   sd->engine_instance = NULL;
   if (sd->job) ecore_job_del(sd->job);
   sd->job = NULL;
   if (sd->anim) ecore_animator_del(sd->anim);
   sd->anim = NULL;
   eina_stringshare_del(sd->file);
   eina_stringshare_del(sd->progress.info);
   eina_stringshare_del(sd->ref.file);
   eina_stringshare_del(sd->title);
   sd->file = NULL;
   sd->progress.info = NULL;
   sd->ref.file = NULL;
   sd->title = NULL;
   _xattr_data_unref(sd->xattr);
   efl_canvas_group_del(efl_super(obj, MY_CLASS));
   emotion_shutdown();
}

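/* Geometry overrides: keep the crop clipper and the background
 * rectangle in sync with the smart object whenever it is moved or
 * resized. */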
EOLIAN static void
_efl_canvas_video_efl_gfx_entity_position_set(Evas_Object *obj, Efl_Canvas_Video_Data *sd, Eina_Position2D pos)
{
   Eina_Size2D sz;

   if (_evas_object_intercept_call(obj, EVAS_OBJECT_INTERCEPT_CB_MOVE, 0, pos.x, pos.y))
     return;

   efl_gfx_entity_position_set(efl_super(obj, MY_CLASS), pos);

   sz = efl_gfx_entity_size_get(obj);
   _clipper_position_size_update(obj, pos.x, pos.y, sz.w, sz.h, sd->video.w, sd->video.h);
}

EOLIAN static void
_efl_canvas_video_efl_gfx_entity_size_set(Evas_Object *obj, Efl_Canvas_Video_Data *sd, Eina_Size2D sz)
{
   if (_evas_object_intercept_call(obj, EVAS_OBJECT_INTERCEPT_CB_RESIZE, 0, sz.w, sz.h))
     return;

   efl_gfx_entity_size_set(efl_super(obj, MY_CLASS), sz);

   _efl_canvas_video_aspect_border_apply(obj, sd, sz.w, sz.h);
   evas_object_resize(sd->bg, sz.w, sz.h);
}

/* Internal EO APIs and hidden overrides */

#define EFL_CANVAS_VIDEO_EXTRA_OPS \
   EFL_CANVAS_GROUP_ADD_DEL_OPS(efl_canvas_video)

#include "efl_canvas_video.eo.c"
#include "efl_canvas_video_eo.legacy.c"