// framework2
// (export metadata: 1352 lines · 39.3 KB)
1#include "ofDirectShowPlayer.h"
2#include "ofPixels.h"
3#include "ofMath.h"
4
5#ifdef _MSC_VER
6#pragma comment(lib,"Strmiids.lib")
7#endif
8
9//-------------------------------------------------------------------------------------------------------------------------------------------------------------
10//-------------------------------------------------------------------------------------------------------------------------------------------------------------
11// DirectShow includes and helper methods
12//-------------------------------------------------------------------------------------------------------------------------------------------------------------
13//-------------------------------------------------------------------------------------------------------------------------------------------------------------
14
15
16#include <dshow.h>
17#ifdef _MSC_VER
18#pragma include_alias( "dxtrans.h", "qedit.h" )
19#endif
20#define __IDxtCompositor_INTERFACE_DEFINED__
21#define __IDxtAlphaSetter_INTERFACE_DEFINED__
22#define __IDxtJpeg_INTERFACE_DEFINED__
23#define __IDxtKey_INTERFACE_DEFINED__
24#include <aviriff.h>
25#include <windows.h>
26
27//for threading
28#include <process.h>
29
30// Due to a missing qedit.h in recent Platform SDKs, we've replicated the relevant contents here
31// #include <qedit.h>
// Callback interface invoked by the Sample Grabber filter. Replicated here
// because qedit.h is missing from recent Platform SDKs (see note above).
// The IID string must match the one the Sample Grabber expects.
MIDL_INTERFACE("0579154A-2B53-4994-B0D0-E773148EFF85")
ISampleGrabberCB : public IUnknown
{
public:
    // Called with the original media sample (zero-copy); used by this player.
    virtual HRESULT STDMETHODCALLTYPE SampleCB(
        double SampleTime,
        IMediaSample *pSample) = 0;

    // Called with a copy of the buffer (extra overhead); unused here.
    virtual HRESULT STDMETHODCALLTYPE BufferCB(
        double SampleTime,
        BYTE *pBuffer,
        long BufferLen) = 0;

};
46
// Control interface of the Sample Grabber filter, replicated from qedit.h.
// Method order defines the COM vtable layout and must not be changed.
MIDL_INTERFACE("6B652FFF-11FE-4fce-92AD-0266B5D7C78F")
ISampleGrabber : public IUnknown
{
public:
    // TRUE = stop after the first sample; we always pass FALSE.
    virtual HRESULT STDMETHODCALLTYPE SetOneShot(
        BOOL OneShot) = 0;

    // Restrict the media type the grabber will connect with (e.g. RGB24).
    virtual HRESULT STDMETHODCALLTYPE SetMediaType(
        const AM_MEDIA_TYPE *pType) = 0;

    // Returns the negotiated media type after the graph is connected.
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType(
        AM_MEDIA_TYPE *pType) = 0;

    // TRUE = grabber keeps an internal copy readable via GetCurrentBuffer.
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples(
        BOOL BufferThem) = 0;

    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer(
        /* [out][in] */ long *pBufferSize,
        /* [out] */ long *pBuffer) = 0;

    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample(
        /* [retval][out] */ IMediaSample **ppSample) = 0;

    // WhichMethodToCallback: 0 = SampleCB, 1 = BufferCB.
    virtual HRESULT STDMETHODCALLTYPE SetCallback(
        ISampleGrabberCB *pCallback,
        long WhichMethodToCallback) = 0;

};
75EXTERN_C const CLSID CLSID_SampleGrabber;
76EXTERN_C const IID IID_ISampleGrabber;
77EXTERN_C const CLSID CLSID_NullRenderer;
78
79// GetUnconnectedPin
80// Finds an unconnected pin on a filter in the desired direction
81HRESULT GetUnconnectedPin(
82IBaseFilter *pFilter, // Pointer to the filter.
83PIN_DIRECTION PinDir, // Direction of the pin to find.
84IPin **ppPin) // Receives a pointer to the pin.
85{
86*ppPin = 0;
87IEnumPins *pEnum = 0;
88IPin *pPin = 0;
89HRESULT hr = pFilter->EnumPins(&pEnum);
90if (FAILED(hr))
91{
92return hr;
93}
94while (pEnum->Next(1, &pPin, NULL) == S_OK)
95{
96PIN_DIRECTION ThisPinDir;
97pPin->QueryDirection(&ThisPinDir);
98if (ThisPinDir == PinDir)
99{
100IPin *pTmp = 0;
101hr = pPin->ConnectedTo(&pTmp);
102if (SUCCEEDED(hr)) // Already connected, not the pin we want.
103{
104pTmp->Release();
105}
106else // Unconnected, this is the pin we want.
107{
108pEnum->Release();
109*ppPin = pPin;
110return S_OK;
111}
112}
113pPin->Release();
114}
115pEnum->Release();
116// Did not find a matching pin.
117return E_FAIL;
118}
119
120// Disconnect any connections to the filter.
121HRESULT DisconnectPins(IBaseFilter *pFilter)
122{
123IEnumPins *pEnum = 0;
124IPin *pPin = 0;
125HRESULT hr = pFilter->EnumPins(&pEnum);
126if (FAILED(hr))
127{
128return hr;
129}
130
131while (pEnum->Next(1, &pPin, NULL) == S_OK)
132{
133pPin->Disconnect();
134pPin->Release();
135}
136pEnum->Release();
137
138// Did not find a matching pin.
139return S_OK;
140}
141
142// ConnectFilters
143// Connects a pin of an upstream filter to the pDest downstream filter
144HRESULT ConnectFilters(
145IGraphBuilder *pGraph, // Filter Graph Manager.
146IPin *pOut, // Output pin on the upstream filter.
147IBaseFilter *pDest) // Downstream filter.
148{
149if ((pGraph == NULL) || (pOut == NULL) || (pDest == NULL))
150{
151return E_POINTER;
152}
153#ifdef debug
154PIN_DIRECTION PinDir;
155pOut->QueryDirection(&PinDir);
156_ASSERTE(PinDir == PINDIR_OUTPUT);
157#endif
158
159// Find an input pin on the downstream filter.
160IPin *pIn = 0;
161HRESULT hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
162if (FAILED(hr))
163{
164return hr;
165}
166// Try to connect them.
167hr = pGraph->Connect(pOut, pIn);
168pIn->Release();
169return hr;
170}
171
172
173
174// ConnectFilters
175// Connects two filters
176HRESULT ConnectFilters(
177IGraphBuilder *pGraph,
178IBaseFilter *pSrc,
179IBaseFilter *pDest)
180{
181if ((pGraph == NULL) || (pSrc == NULL) || (pDest == NULL))
182{
183return E_POINTER;
184}
185
186// Find an output pin on the first filter.
187IPin *pOut = 0;
188HRESULT hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
189if (FAILED(hr))
190{
191return hr;
192}
193hr = ConnectFilters(pGraph, pOut, pDest);
194pOut->Release();
195return hr;
196}
197
198// LocalFreeMediaType
199// Free the format buffer in the media type
// LocalFreeMediaType
// Free the format buffer in the media type and drop its pUnk reference,
// nulling both so a double call is safe. Local clone of the DirectShow
// base-classes FreeMediaType helper.
void LocalFreeMediaType(AM_MEDIA_TYPE& mt)
{
    if (mt.cbFormat != 0)
    {
        CoTaskMemFree((PVOID)mt.pbFormat);
        mt.cbFormat = 0;
        mt.pbFormat = NULL;
    }
    if (mt.pUnk != NULL)
    {
        // Unecessary because pUnk should not be used, but safest.
        mt.pUnk->Release();
        mt.pUnk = NULL;
    }
}
215
216// LocalDeleteMediaType
217// Free the format buffer in the media type,
218// then delete the MediaType ptr itself
// LocalDeleteMediaType
// Free the format buffer in the media type, then free the AM_MEDIA_TYPE
// struct itself (which DirectShow allocates with CoTaskMemAlloc).
// Null input is a no-op.
void LocalDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt != NULL)
    {
        LocalFreeMediaType(*pmt); // See FreeMediaType for the implementation.
        CoTaskMemFree(pmt);
    }
}
227
228
229HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath)
230{
231const WCHAR wszStreamName[] = L"ActiveMovieGraph";
232HRESULT hr;
233
234IStorage *pStorage = NULL;
235hr = StgCreateDocfile(
236wszPath,
237STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
2380, &pStorage);
239if(FAILED(hr))
240{
241return hr;
242}
243
244IStream *pStream;
245hr = pStorage->CreateStream(
246wszStreamName,
247STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE,
2480, 0, &pStream);
249if (FAILED(hr))
250{
251pStorage->Release();
252return hr;
253}
254
255IPersistStream *pPersist = NULL;
256pGraph->QueryInterface(IID_IPersistStream, (void**)&pPersist);
257hr = pPersist->Save(pStream, TRUE);
258pStream->Release();
259pPersist->Release();
260if (SUCCEEDED(hr))
261{
262hr = pStorage->Commit(STGC_DEFAULT);
263}
264pStorage->Release();
265return hr;
266}
267
268//-------------------------------------------------------------------------------------------------------------------------------------------------------------
269//-------------------------------------------------------------------------------------------------------------------------------------------------------------
270// DirectShowVideo - contains a simple directshow video player implementation
271//-------------------------------------------------------------------------------------------------------------------------------------------------------------
272//-------------------------------------------------------------------------------------------------------------------------------------------------------------
273
namespace{
    // Process-wide refcount of DirectShowVideo instances; COM is initialized
    // when the first one appears and uninitialized when the last one dies.
    // NOTE(review): plain int, no synchronization - assumes all players are
    // created and destroyed on the same (main) thread; confirm against usage.
    int comRefCount = 0;

    void retainCom(){
        if( comRefCount == 0 ){
            //printf("com is initialized!\n");
            CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
        }
        comRefCount++;
    }

    void releaseCom(){
        comRefCount--;
        if( comRefCount == 0 ){
            //printf("com is uninitialized!\n");
            CoUninitialize();
        }
    }

    // Deleter for the unique_ptr-held IMediaSample references (see
    // DirectShowVideo::backSample / middleSample).
    void releaseSample(IMediaSample * sample){
        sample->Release();
    }
}
297
298
// DirectShowVideo
// A self-contained DirectShow playback graph. The object registers itself as
// the Sample Grabber callback (ISampleGrabberCB), so decoded frames arrive in
// SampleCB() on a DirectShow streaming thread; the app thread collects them
// via getPixels(). backSample/middleSample hold AddRef'd IMediaSamples and
// are the only state shared across threads (guarded by critSection).
class DirectShowVideo : public ISampleGrabberCB{
public:

    DirectShowVideo(){
        retainCom();                              // ref-count process-wide COM init
        clearValues();
        InitializeCriticalSection(&critSection);  // guards backSample / bNewPixels / frameCount
    }

    ~DirectShowVideo(){
        tearDown();
        middleSample.reset();
        backSample.reset();
        releaseCom();
        DeleteCriticalSection(&critSection);
    }

    // Release every COM interface we hold and reset all state back to
    // defaults. Safe on a partially-built graph (each pointer is checked)
    // and safe to call repeatedly because clearValues() nulls everything.
    void tearDown(){
        //printf("tearDown\n");

        if(m_pControl){
            m_pControl->Release();
        }
        if(m_pEvent){
            m_pEvent->Release();
        }
        if(m_pSeek){
            m_pSeek->Release();
        }
        if(m_pAudio){
            m_pAudio->Release();
        }
        if(m_pBasicVideo){
            m_pBasicVideo->Release();
        }
        if(m_pGrabber){
            m_pGrabber->Release();
        }
        if(m_pGrabberF){
            m_pGrabberF->Release();
        }
        if(m_pGraph){
            m_pGraph->Release();
        }
        if(m_pNullRenderer){
            m_pNullRenderer->Release();
        }
        if( m_pSourceFile ){
            m_pSourceFile->Release();
        }
        if( m_pPosition ){
            m_pPosition->Release();
        }
        clearValues();
    }

    // Reset every member to its "no video loaded" default.
    void clearValues(){
        hr = 0;

        m_pGraph = NULL;
        m_pControl = NULL;
        m_pEvent = NULL;
        m_pSeek = NULL;
        m_pAudio = NULL;
        m_pGrabber = NULL;
        m_pGrabberF = NULL;
        m_pBasicVideo = NULL;
        m_pNullRenderer = NULL;
        m_pSourceFile = NULL;
        m_pPosition = NULL;

        timeNow = 0;
        lPositionInSecs = 0;
        lDurationInNanoSecs = 0;
        lTotalDuration = 0;
        rtNew = 0;
        lPosition = 0;
        lvolume = -1000;
        evCode = 0;
        width = height = 0;
        bVideoOpened = false;
        bLoop = true;          // loop by default
        bPaused = false;
        bPlaying = false;
        bEndReached = false;
        bNewPixels = false;
        bFrameNew = false;
        curMovieFrame = -1;
        frameCount = -1;

        movieRate = 1.0;
        averageTimePerFrame = 1.0/30.0;   // refined from the stream in loadMovie()
    }

    //------------------------------------------------
    // Stubbed IUnknown ref-counting: this object's lifetime is owned by
    // ofDirectShowPlayer, not by COM, so it must outlive the graph that
    // holds it as a callback.
    STDMETHODIMP_(ULONG) AddRef() { return 1; }
    STDMETHODIMP_(ULONG) Release() { return 2; }


    //------------------------------------------------
    // NOTE(review): ignores riid and always hands back ISampleGrabberCB; a
    // conforming implementation would check the IID. Works in practice
    // because only the sample grabber queries this object.
    STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject){
        *ppvObject = static_cast<ISampleGrabberCB*>(this);
        return S_OK;
    }


    //------------------------------------------------
    // Called by the sample grabber on a DirectShow streaming thread for each
    // decoded frame. AddRefs the sample and parks it in backSample; the app
    // thread swaps it out in getPixels().
    STDMETHODIMP SampleCB(double Time, IMediaSample *pSample){

        BYTE * ptrBuffer = NULL;
        HRESULT hr = pSample->GetPointer(&ptrBuffer);

        if(hr == S_OK){
            std::size_t latestBufferLength = pSample->GetActualDataLength();
            if(latestBufferLength == pixels.getTotalBytes() ){
                EnterCriticalSection(&critSection);
                pSample->AddRef();  // keep alive until releaseSample() runs via the unique_ptr deleter
                backSample = std::unique_ptr<IMediaSample, std::function<void(IMediaSample*)>>(pSample, releaseSample);
                bNewPixels = true;

                //this is just so we know if there is a new frame
                frameCount++;

                LeaveCriticalSection(&critSection);
            }else{
                ofLogError() << "SampleCB() - buffer sizes do not match "<< latestBufferLength << " " << pixels.getTotalBytes();
            }
        }

        return S_OK;
    }

    //This method is meant to have more overhead (it copies the buffer),
    //so we use SampleCB above instead.
    STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen){
        return E_NOTIMPL;
    }

    // Build the playback graph for 'path' and leave it stopped at the start.
    // 'format' selects the grabber output: RGB24 for (B)GR, RGB32 for (B)GRA.
    // Returns false (after tearDown()) on any failure.
    bool loadMovie(of::filesystem::path path, ofPixelFormat format){
        tearDown();
        this->pixelFormat = format;

        // Create the Filter Graph Manager and query for interfaces.

        //printf("step 1\n");
        hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void **)&m_pGraph);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        //printf("step 2\n");
        hr = m_pGraph->QueryInterface(IID_IMediaSeeking, (void**)&m_pSeek);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        hr = m_pGraph->QueryInterface(IID_IMediaPosition, (LPVOID *)&m_pPosition);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        hr = m_pGraph->QueryInterface(IID_IBasicAudio,(void**)&m_pAudio);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        // Use IGraphBuilder::QueryInterface (inherited from IUnknown) to get the IMediaControl interface.
        //printf("step 4\n");
        hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **)&m_pControl);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        // And get the Media Event interface, too.
        //printf("step 5\n");
        hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **)&m_pEvent);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        //SAMPLE GRABBER (ALLOWS US TO GRAB THE BUFFER)//
        // Create the Sample Grabber.
        hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&m_pGrabberF);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        hr = m_pGraph->AddFilter(m_pGrabberF, L"Sample Grabber");
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        hr = m_pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&m_pGrabber);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        hr = m_pGrabber->SetCallback(this, 0);  // 0 = deliver frames via SampleCB
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        //MEDIA CONVERSION
        //Get video properties from the stream's mediatype and apply to the grabber (otherwise we don't get an RGB image)
        AM_MEDIA_TYPE mt;
        ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));

        mt.majortype = MEDIATYPE_Video;
        switch (format) {
            case OF_PIXELS_RGB:
            case OF_PIXELS_BGR:
                mt.subtype = MEDIASUBTYPE_RGB24;
                break;
            case OF_PIXELS_BGRA:
            case OF_PIXELS_RGBA:
                mt.subtype = MEDIASUBTYPE_RGB32;
                break;
            default:
                ofLogError("DirectShowPlayer") << "Trying to set unsupported format this is an internal bug, using default RGB";
                mt.subtype = MEDIASUBTYPE_RGB24;
        }

        mt.formattype = FORMAT_VideoInfo;
        //printf("step 5.5\n");
        hr = m_pGrabber->SetMediaType(&mt);
        if (FAILED(hr)){
            tearDown();
            return false;
        }

        //printf("step 6\n");
        // NOTE(review): byte-wise widening only round-trips ASCII paths;
        // non-ASCII file names will be mangled here - confirm against callers.
        std::string pathString = path.string();
        std::wstring filePathW = std::wstring(pathString.begin(), pathString.end());

        //this is the easier way to connect the graph, but we have to remove the video window manually
        hr = m_pGraph->RenderFile(filePathW.c_str(), NULL);

        //this is the more manual way to do it - its a pain though because the audio won't be connected by default
        /*hr = m_pGraph->AddSourceFilter(filePathW.c_str(), L"Source", &m_pSourceFile);
        if (FAILED(hr)){
            printf("unable to AddSourceFilter\n");
            tearDown();
            return false;
        }*/
        //hr = ConnectFilters(m_pGraph, m_pSourceFile, m_pGrabberF);
        //if (FAILED(hr)){
        //    printf("unable to ConnectFilters(m_pGraph, m_pSourceFile, m_pGrabberF)\n");
        //    tearDown();
        //    return false;
        //}

        //printf("step 7\n");
        if (SUCCEEDED(hr)){

            //Set Params - One Shot should be false unless you want to capture just one buffer
            hr = m_pGrabber->SetOneShot(FALSE);
            if (FAILED(hr)){
                printf("unable to set one shot\n");
                tearDown();
                return false;
            }

            //apparently setting to TRUE causes a small memory leak
            hr = m_pGrabber->SetBufferSamples(FALSE);
            if (FAILED(hr)){
                printf("unable to set buffer samples\n");
                tearDown();
                return false;
            }

            //NULL RENDERER//
            //used to give the video stream somewhere to go to.
            hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&m_pNullRenderer));
            if (FAILED(hr)){
                printf("null renderer error\n");
                tearDown();
                return false;
            }

            hr = m_pGraph->AddFilter(m_pNullRenderer, L"Render");
            if (FAILED(hr)){
                printf("unable to add null renderer\n");
                tearDown();
                return false;
            }

            //hr = ConnectFilters(m_pGraph, m_pGrabberF, m_pNullRenderer);
            //if (FAILED(hr)){
            //    printf("unable to ConnectFilters(m_pGraph, m_pGrabberF, m_pNullRenderer)\n");
            //    tearDown();
            //    return false;
            //}

            // Deliberately shadows the outer 'mt': this one receives the
            // negotiated (connected) media type, including the frame size.
            AM_MEDIA_TYPE mt;
            ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));

            hr = m_pGrabber->GetConnectedMediaType(&mt);
            if (FAILED(hr)){
                printf("unable to call GetConnectedMediaType\n");
                tearDown();
                return false;
            }

            VIDEOINFOHEADER * infoheader = (VIDEOINFOHEADER*)mt.pbFormat;
            width = infoheader->bmiHeader.biWidth;
            height = infoheader->bmiHeader.biHeight;
            averageTimePerFrame = infoheader->AvgTimePerFrame / 10000000.0;  // REFERENCE_TIME is 100ns units
            pixels.allocate(width, height, pixelFormat);

            //printf("video dimensions are %i %i\n", width, height);

            //we need to manually change the output from the renderer window to the null renderer
            // NOTE(review): pinIn/pinOut references are never Release()d on the
            // success path - small per-load leak; confirm and fix separately.
            IBaseFilter * m_pVideoRenderer;
            IPin* pinIn = 0;
            IPin* pinOut = 0;

            hr = m_pGraph->FindFilterByName(L"Video Renderer", &m_pVideoRenderer);

            if (FAILED(hr)) {
                //newer graphs use Video Mixing Renderer 9
                hr = m_pGraph->FindFilterByName(L"Video Mixing Renderer 9", &m_pVideoRenderer);
                if (FAILED(hr)) {
                    printf("failed to find the video renderer\n");
                    tearDown();
                    return false;
                }
            }

            //we disconnect the video renderer window by finding the output pin of the sample grabber
            hr = m_pGrabberF->FindPin(L"Out", &pinOut);
            if (FAILED(hr)){
                printf("failed to find the sample grabber output pin\n");
                tearDown();
                return false;
            }

            hr = pinOut->Disconnect();
            if (FAILED(hr)){
                printf("failed to disconnect grabber output pin\n");
                tearDown();
                return false;
            }

            //SaveGraphFile(m_pGraph, L"test1.grf");

            //we have to remove it as well otherwise the graph builder will reconnect it
            hr = m_pGraph->RemoveFilter(m_pVideoRenderer);
            if (FAILED(hr)){
                printf("failed to remove the default renderer\n");
                tearDown();
                return false;
            }else{
                m_pVideoRenderer->Release();
            }

            //now connect the null renderer to the grabber output, if we don't do this not frames will be captured
            hr = m_pNullRenderer->FindPin(L"In", &pinIn);
            if (FAILED(hr)){
                printf("failed to find the input pin of the null renderer\n");
                tearDown();
                return false;
            }

            hr = pinOut->Connect(pinIn, NULL);
            if (FAILED(hr)){
                printf("failed to connect the null renderer\n");
                tearDown();
                return false;
            }

            //printf("step 8\n");
            // Run the graph.

            //SaveGraphFile(m_pGraph, L"test2.grf");
            hr = m_pControl->Run();
            //SaveGraphFile(m_pGraph, L"test3.grf");

            // Now pause the graph.
            hr = m_pControl->Stop();
            updatePlayState();

            if( FAILED(hr) || width == 0 || height == 0 ){
                tearDown();
                printf("Error occured while playing or pausing or opening the file\n");
                return false;
            }
        }else{
            tearDown();
            printf("Error occured while playing or pausing or opening the file\n");
            return false;
        }

        bVideoOpened = true;
        return true;
    }

    // Per-app-frame tick: refreshes bFrameNew by comparing frame counters and
    // drains the graph's event queue to detect end-of-stream (loop or stop).
    void update(){
        if( bVideoOpened ){

            long eventCode = 0;
#ifdef _WIN64
            long long ptrParam1 = 0;
            long long ptrParam2 = 0;
#else
            long ptrParam1 = 0;
            long ptrParam2 = 0;
#endif
            if( curMovieFrame != frameCount ){
                bFrameNew = true;
            }else{
                bFrameNew = false;
            }
            curMovieFrame = frameCount;

            while (S_OK == m_pEvent->GetEvent(&eventCode, &ptrParam1, &ptrParam2, 0)){
                if (eventCode == EC_COMPLETE ){
                    if(bLoop){
                        //printf("Restarting!\n");
                        setPosition(0.0);
                    }else{
                        bEndReached = true;
                        //printf("movie end reached!\n");
                        stop();
                        updatePlayState();
                    }
                }
                //printf("Event code: %#04x\n Params: %d, %d\n", eventCode, ptrParam1, ptrParam2);
                m_pEvent->FreeEventParams(eventCode, ptrParam1, ptrParam2);
            }
        }
    }

    bool isLoaded(){
        return bVideoOpened;
    }

    //volume has to be log corrected/converted
    // volPct is 0..1; IBasicAudio wants 1/100ths of a dB in [-10000, 0].
    void setVolume(float volPct){
        if( isLoaded() ){
            if( volPct < 0 ) volPct = 0.0;
            if( volPct > 1 ) volPct = 1.0;

            long vol = log10(volPct) * 4000.0;
            if(vol < -8000){
                vol = -10000;   // below -80dB snap to full mute
            }
            m_pAudio->put_Volume(vol);
        }
    }

    float getVolume(){
        float volPct = 0.0;
        if( isLoaded() ){
            long vol = 0;
            m_pAudio->get_Volume(&vol);
            volPct = powf(10, (float)vol/4000.0);  // inverse of setVolume()'s mapping
        }
        return volPct;
    }

    double getDurationInSeconds(){
        if( isLoaded() ){
            // local intentionally shadows the member of the same name
            long long lDurationInNanoSecs = 0;
            m_pSeek->GetDuration(&lDurationInNanoSecs);
            double timeInSeconds = (double)lDurationInNanoSecs/10000000.0;

            return timeInSeconds;
        }
        return 0.0;
    }

    double getCurrentTimeInSeconds(){
        if( isLoaded() ){
            long long lCurrentTimeInNanoSecs = 0;
            m_pSeek->GetCurrentPosition(&lCurrentTimeInNanoSecs);
            double timeInSeconds = (double)lCurrentTimeInNanoSecs/10000000.0;

            return timeInSeconds;
        }
        return 0.0;
    }

    // Seek to pct (clamped to 0..1) of the movie's duration.
    void setPosition(float pct){
        if( bVideoOpened ){
            if( pct < 0.0 ) pct = 0.0;
            if( pct > 1.0 ) pct = 1.0;

            long long lDurationInNanoSecs = 0;
            m_pSeek->GetDuration(&lDurationInNanoSecs);

            rtNew = ((float)lDurationInNanoSecs * pct);
            hr = m_pSeek->SetPositions(&rtNew, AM_SEEKING_AbsolutePositioning,NULL,AM_SEEKING_NoPositioning);
        }
    }

    // Current position as a 0..1 fraction of the duration.
    float getPosition(){
        if( bVideoOpened ){
            float timeDur = getDurationInSeconds();
            if( timeDur > 0.0 ){
                return getCurrentTimeInSeconds() / timeDur;
            }
        }
        return 0.0;
    }

    void setSpeed(float speed){
        if( bVideoOpened ){
            m_pPosition->put_Rate(speed);
            m_pPosition->get_Rate(&movieRate);  // read back the rate actually applied
        }
    }

    double getSpeed(){
        return movieRate;
    }

    // True when src and dst differ only by a red/blue channel swap.
    bool needsRBSwap(ofPixelFormat srcFormat, ofPixelFormat dstFormat) {
        return
            ((srcFormat == OF_PIXELS_BGR || srcFormat == OF_PIXELS_BGRA) && (dstFormat == OF_PIXELS_RGB || dstFormat == OF_PIXELS_RGBA)) ||
            ((srcFormat == OF_PIXELS_RGB || srcFormat == OF_PIXELS_RGBA) && (dstFormat == OF_PIXELS_BGR || dstFormat == OF_PIXELS_BGRA));
    }

    // Copy src into dst, flipping vertically (DirectShow RGB frames are
    // bottom-up) and swapping R<->B when the formats require it.
    void processPixels(ofPixels & src, ofPixels & dst){
        auto format = src.getPixelFormat();

        if(needsRBSwap(format, dst.getPixelFormat())){
            if (format == OF_PIXELS_BGR) {
                dst.allocate(src.getWidth(), src.getHeight(), OF_PIXELS_RGB);
                auto dstLine = dst.getLines().begin();
                auto srcLine = --src.getLines().end();   // walk src bottom-up
                auto endLine = dst.getLines().end();
                for (; dstLine != endLine; dstLine++, srcLine--) {
                    auto dstPixel = dstLine.getPixels().begin();
                    auto srcPixel = srcLine.getPixels().begin();
                    auto endPixel = dstLine.getPixels().end();
                    for (; dstPixel != endPixel; dstPixel++, srcPixel++) {
                        dstPixel[0] = srcPixel[2];
                        dstPixel[1] = srcPixel[1];
                        dstPixel[2] = srcPixel[0];
                    }
                }
            }
            else if (format == OF_PIXELS_BGRA) {
                dst.allocate(src.getWidth(), src.getHeight(), OF_PIXELS_RGBA);
                auto dstLine = dst.getLines().begin();
                auto srcLine = --src.getLines().end();
                auto endLine = dst.getLines().end();
                for (; dstLine != endLine; dstLine++, srcLine--) {
                    auto dstPixel = dstLine.getPixels().begin();
                    auto srcPixel = srcLine.getPixels().begin();
                    auto endPixel = dstLine.getPixels().end();
                    // NOTE(review): only 3 channels are copied - dst alpha is
                    // left as allocate() produced it; confirm this is intended.
                    for (; dstPixel != endPixel; dstPixel++, srcPixel++) {
                        dstPixel[0] = srcPixel[2];
                        dstPixel[1] = srcPixel[1];
                        dstPixel[2] = srcPixel[0];
                    }
                }
            }
        } else {
            src.mirrorTo(dst, true, false);   // vertical flip only
        }
    }

    void play(){
        if( bVideoOpened ){
            m_pControl->Run();
            bEndReached = false;
            updatePlayState();
        }
    }

    // Stop playback and rewind to the start (rewind only if currently playing).
    void stop(){
        if( bVideoOpened ){
            if( isPlaying() ){
                setPosition(0.0);
            }
            m_pControl->Stop();
            updatePlayState();
        }
    }

    void setPaused(bool bPaused){
        if( bVideoOpened ){
            if( bPaused ){
                m_pControl->Pause();
            }else{
                m_pControl->Run();
            }
            updatePlayState();
        }

    }

    // Query the graph state (waiting up to 4s for transitions) and mirror it
    // into bPlaying / bPaused.
    void updatePlayState(){
        if( bVideoOpened ){
            FILTER_STATE fs;
            hr = m_pControl->GetState(4000, (OAFilterState*)&fs);
            if(hr==S_OK){
                if( fs == State_Running ){
                    bPlaying = true;
                    bPaused = false;
                }
                else if( fs == State_Paused ){
                    bPlaying = false;
                    bPaused = true;
                }else if( fs == State_Stopped ){
                    bPlaying = false;
                    bPaused = false;
                }
            }
        }
    }

    bool isPlaying(){
        return bPlaying;
    }

    bool isPaused(){
        return bPaused;
    }

    bool isLooping(){
        return bLoop;
    }

    void setLoop(bool loop){
        bLoop = loop;
    }

    bool isMovieDone(){
        return bEndReached;
    }

    float getWidth(){
        return width;
    }

    float getHeight(){
        return height;
    }

    bool isFrameNew(){
        return bFrameNew;
    }

    // Step forward one frame by nudging the position in sub-frame increments
    // until the reported frame number advances.
    void nextFrame(){
        //we have to do it like this as the frame based approach is not very accurate
        if( bVideoOpened && ( isPlaying() || isPaused() ) ){
            int curFrame = getCurrentFrameNo();
            float curFrameF = curFrame;
            for(int i = 1; i < 20; i++){
                setAproximateFrameF( curFrameF + 0.3 * (float)i );
                if( getCurrentFrameNo() >= curFrame + 1 ){
                    break;
                }
            }
        }
    }

    // Step back one frame - mirror image of nextFrame().
    void preFrame(){
        //we have to do it like this as the frame based approach is not very accurate
        if( bVideoOpened && ( isPlaying() || isPaused() ) ){
            int curFrame = getCurrentFrameNo();
            float curFrameF = curFrame;
            for(int i = 1; i < 20; i++){
                setAproximateFrameF( curFrameF - 0.3 * (float)i );
                // NOTE(review): nextFrame() tests ">= curFrame + 1"; the
                // symmetric condition here would be "<= curFrame - 1" - this
                // as written is satisfied immediately. Confirm intent.
                if( getCurrentFrameNo() <= curFrame + 1 ){
                    break;
                }
            }
        }
    }

    // Seek to a (fractional) frame number via a percentage position.
    void setAproximateFrameF(float frameF){
        if( bVideoOpened ){
            float pct = frameF / (float)getAproximateNoFrames();
            if( pct > 1.0 ) pct = 1.0;
            if( pct < 0.0 ) pct = 0.0;
            setPosition(pct);
        }
    }

    void setAproximateFrame(int frame){
        if( bVideoOpened ){
            float pct = (float)frame / (float)getAproximateNoFrames();
            if( pct > 1.0 ) pct = 1.0;
            if( pct < 0.0 ) pct = 0.0;
            setPosition(pct);
        }
    }

    int getCurrentFrameNo(){
        if( bVideoOpened ){
            return getPosition() * (float) getAproximateNoFrames();
        }
        return 0;
    }

    // Frame count estimated as duration / average frame time ("aproximate"
    // because DirectShow exposes no exact frame count for all sources).
    int getAproximateNoFrames(){
        if( bVideoOpened && averageTimePerFrame > 0.0 ){
            return getDurationInSeconds() / averageTimePerFrame;
        }
        return 0;
    }

    // App-thread accessor: swaps in the freshest grabbed sample under the
    // lock, then converts it (R/B swap + vertical flip) into 'pixels'.
    // Returns 'pixels' unchanged when no new sample has arrived.
    ofPixels & getPixels(){
        if(bVideoOpened && bNewPixels){
            EnterCriticalSection(&critSection);
            std::swap(backSample, middleSample);
            bNewPixels = false;
            LeaveCriticalSection(&critSection);
            BYTE * ptrBuffer = NULL;
            if( middleSample->GetPointer(&ptrBuffer) == S_OK) {
                ofPixels srcBuffer;
                switch (pixelFormat) {
                    case OF_PIXELS_RGB:
                    case OF_PIXELS_BGR:
                        // grabber was negotiated to RGB24, which is BGR byte order
                        srcBuffer.setFromExternalPixels(ptrBuffer, width, height, OF_PIXELS_BGR);
                        break;
                    case OF_PIXELS_RGBA:
                    case OF_PIXELS_BGRA:
                        srcBuffer.setFromExternalPixels(ptrBuffer, width, height, OF_PIXELS_BGRA);
                        break;
                    case OF_PIXELS_GRAY:
                    case OF_PIXELS_GRAY_ALPHA:
                    case OF_PIXELS_RGB565:
                    case OF_PIXELS_NV12:
                    case OF_PIXELS_NV21:
                    case OF_PIXELS_YV12:
                    case OF_PIXELS_I420:
                    case OF_PIXELS_YUY2:
                    case OF_PIXELS_UYVY:
                    case OF_PIXELS_Y:
                    case OF_PIXELS_U:
                    case OF_PIXELS_V:
                    case OF_PIXELS_UV:
                    case OF_PIXELS_VU:
                    case OF_PIXELS_NUM_FORMATS:
                    case OF_PIXELS_UNKNOWN:
                    case OF_PIXELS_NATIVE:
                    default:
                        // unsupported here; setPixelFormat() rejects these upstream
                        break;
                }

                processPixels(srcBuffer, pixels);
            }
        }
        return pixels;
    }

    //this is the non-callback approach
    //void getPixels(unsigned char * dstBuffer){
    //
    //    if(bVideoOpened && isFrameNew()){
    //        long bufferSize = videoSize;
    //        HRESULT hr = m_pGrabber->GetCurrentBuffer(&bufferSize, (long *)rawBuffer);
    //
    //        if(hr==S_OK){
    //            if (videoSize == bufferSize){
    //                processPixels(rawBuffer, dstBuffer, width, height, true, true);
    //            }else{
    //                printf("ERROR: GetPixels() - bufferSizes do not match!\n");
    //            }
    //        }else{
    //            printf("ERROR: GetPixels() - Unable to get pixels for device bufferSize = %i \n", bufferSize);
    //        }
    //    }
    //}

protected:

    HRESULT hr;                        // COM return value
    IGraphBuilder *m_pGraph;           // Graph Builder interface
    IMediaControl *m_pControl;         // Media Control interface
    IMediaEvent *m_pEvent;             // Media Event interface
    IMediaSeeking *m_pSeek;            // Media Seeking interface
    IMediaPosition * m_pPosition;      // Rate control (setSpeed/getSpeed)
    IBasicAudio *m_pAudio;             // Audio Settings interface
    ISampleGrabber * m_pGrabber;       // Grabber control interface
    IBaseFilter * m_pSourceFile;       // Source filter (manual-graph path only)
    IBaseFilter * m_pGrabberF;         // Sample grabber as a filter
    IBasicVideo * m_pBasicVideo;       // NOTE(review): never acquired in loadMovie; kept for teardown symmetry
    IBaseFilter * m_pNullRenderer;     // Sink replacing the on-screen video window

    REFERENCE_TIME timeNow;            // Used for FF & REW of movie, current time
    LONGLONG lPositionInSecs;          // Time in seconds
    LONGLONG lDurationInNanoSecs;      // Duration in nanoseconds
    LONGLONG lTotalDuration;           // Total duration
    REFERENCE_TIME rtNew;              // Reference time of movie
    long lPosition;                    // Desired position of movie used in FF & REW
    long lvolume;                      // The volume level in 1/100ths dB Valid values range from -10,000 (silence) to 0 (full volume), 0 = 0 dB -10000 = -100 dB
    long evCode;                       // event variable, used to in file to complete wait.

    long width, height;                // negotiated frame size, set in loadMovie()

    double averageTimePerFrame;        // seconds per frame, from the stream header

    bool bFrameNew;                    // true for the app frame after a new sample arrived
    bool bNewPixels;                   // set by SampleCB, cleared by getPixels (lock-guarded)
    bool bVideoOpened;
    bool bPlaying;
    bool bPaused;
    bool bLoop;
    bool bEndReached;
    double movieRate;
    int curMovieFrame;                 // frameCount snapshot from the last update()
    int frameCount;                    // incremented by SampleCB per delivered sample

    CRITICAL_SECTION critSection;      // guards backSample / bNewPixels / frameCount
    // backSample: written by the streaming thread; middleSample: held by the
    // app thread while it converts. Both own one AddRef on their sample.
    std::unique_ptr<IMediaSample, std::function<void(IMediaSample*)>> backSample;
    std::unique_ptr<IMediaSample, std::function<void(IMediaSample*)>> middleSample;
    ofPixels pixels;                   // last converted frame, returned by getPixels()
    ofPixelFormat pixelFormat;
};
1122
1123
1124
1125
1126//----------------------------------------------------------------------------------------------------------------------------------------------------------------
1127//----------------------------------------------------------------------------------------------------------------------------------------------------------------
1128// OF SPECIFIC IMPLEMENTATION BELOW
1129//----------------------------------------------------------------------------------------------------------------------------------------------------------------
1130//----------------------------------------------------------------------------------------------------------------------------------------------------------------
1131
// Default construction: no video loaded; frames will be delivered as RGB
// unless setPixelFormat() is called before load().
ofDirectShowPlayer::ofDirectShowPlayer()
:pixelFormat(OF_PIXELS_RGB){

}
1136
// Move construction: takes over the other player's DirectShow pipeline and
// pixel-format setting; 'other' is left in the unloaded (moved-from) state.
ofDirectShowPlayer::ofDirectShowPlayer(ofDirectShowPlayer && other)
:player(std::move(other.player))
,pixelFormat(std::move(other.pixelFormat)){

}
1142
1143ofDirectShowPlayer & ofDirectShowPlayer::operator=(ofDirectShowPlayer&& other) {
1144if (&other == this) {
1145return *this;
1146}
1147
1148player = std::move(other.player);
1149pixelFormat = std::move(other.pixelFormat);
1150return *this;
1151}
1152
1153// FIXME: convert to filesystem::path in near future
1154bool ofDirectShowPlayer::load(std::string stringPath){
1155auto path = ofToDataPath(of::filesystem::path(stringPath));
1156
1157close();
1158player.reset(new DirectShowVideo());
1159bool loadOk = player->loadMovie(path, pixelFormat);
1160if( !loadOk ){
1161ofLogError("ofDirectShowPlayer") << " Cannot load video of this file type. Make sure you have codecs installed on your system. OF recommends the free K-Lite Codec pack. ";
1162}
1163return loadOk;
1164}
1165
1166void ofDirectShowPlayer::close(){
1167player.reset();
1168}
1169
1170void ofDirectShowPlayer::update(){
1171if( player && player->isLoaded() ){
1172player->update();
1173}
1174}
1175
1176void ofDirectShowPlayer::play(){
1177if( player && player->isLoaded() ){
1178player->play();
1179}
1180}
1181
1182void ofDirectShowPlayer::stop(){
1183if( player && player->isLoaded() ){
1184player->stop();
1185}
1186}
1187
1188bool ofDirectShowPlayer::isFrameNew() const{
1189return ( player && player->isFrameNew() );
1190}
1191
1192const ofPixels & ofDirectShowPlayer::getPixels() const{
1193return player->getPixels();
1194}
1195
1196ofPixels & ofDirectShowPlayer::getPixels(){
1197return player->getPixels();
1198}
1199
1200float ofDirectShowPlayer::getWidth() const{
1201if( player && player->isLoaded() ){
1202return player->getWidth();
1203}
1204return 0.0;
1205}
1206
1207float ofDirectShowPlayer::getHeight() const{
1208if( player && player->isLoaded() ){
1209return player->getHeight();
1210}
1211return 0.0;
1212}
1213
1214bool ofDirectShowPlayer::isPaused() const{
1215return ( player && player->isPaused() );
1216}
1217
1218bool ofDirectShowPlayer::isLoaded() const{
1219return ( player && player->isLoaded() );
1220}
1221
1222bool ofDirectShowPlayer::isPlaying() const{
1223return ( player && player->isPlaying() );
1224}
1225
1226bool ofDirectShowPlayer::setPixelFormat(ofPixelFormat pixelFormat){
1227switch (pixelFormat) {
1228case OF_PIXELS_RGB:
1229case OF_PIXELS_BGR:
1230case OF_PIXELS_BGRA:
1231case OF_PIXELS_RGBA:
1232this->pixelFormat = pixelFormat;
1233return true;
1234default:
1235return false;
1236}
1237}
1238
// The pixel format frames are (or will be) delivered in.
ofPixelFormat ofDirectShowPlayer::getPixelFormat() const{
    return this->pixelFormat;
}
1242
1243//should implement!
1244float ofDirectShowPlayer::getPosition() const{
1245if( player && player->isLoaded() ){
1246return player->getPosition();
1247}
1248return 0.0;
1249}
1250
1251float ofDirectShowPlayer::getSpeed() const{
1252if( player && player->isLoaded() ){
1253return player->getSpeed();
1254}
1255return 0.0;
1256}
1257
1258float ofDirectShowPlayer::getDuration() const{
1259if( player && player->isLoaded() ){
1260return player->getDurationInSeconds();
1261}
1262return 0.0;
1263}
1264
1265
1266bool ofDirectShowPlayer::getIsMovieDone() const{
1267return ( player && player->isMovieDone() );
1268}
1269
1270void ofDirectShowPlayer::setPaused(bool bPause){
1271if( player && player->isLoaded() ){
1272player->setPaused(bPause);
1273}
1274}
1275
1276void ofDirectShowPlayer::setPosition(float pct){
1277if( player && player->isLoaded() ){
1278player->setPosition(pct);
1279}
1280}
1281
1282void ofDirectShowPlayer::setVolume(float volume){
1283if( player && player->isLoaded() ){
1284player->setVolume(volume);
1285}
1286}
1287
1288void ofDirectShowPlayer::setLoopState(ofLoopType state){
1289if( player ){
1290if( state == OF_LOOP_NONE ){
1291player->setLoop(false);
1292}
1293else if( state == OF_LOOP_NORMAL ){
1294player->setLoop(true);
1295}else{
1296ofLogError("ofDirectShowPlayer") << " cannot set loop of type palindrome ";
1297}
1298}
1299}
1300
1301void ofDirectShowPlayer::setSpeed(float speed){
1302if( player && player->isLoaded() ){
1303player->setSpeed(speed);
1304}
1305}
1306
1307int ofDirectShowPlayer::getCurrentFrame() const{
1308if( player && player->isLoaded() ){
1309return player->getCurrentFrameNo();
1310}
1311return 0;
1312}
1313
1314int ofDirectShowPlayer::getTotalNumFrames() const{
1315if( player && player->isLoaded() ){
1316return player->getAproximateNoFrames();
1317}
1318return 0;
1319}
1320
1321ofLoopType ofDirectShowPlayer::getLoopState() const{
1322if( player ){
1323if( player->isLooping() ){
1324return OF_LOOP_NORMAL;
1325}
1326
1327}
1328return OF_LOOP_NONE;
1329}
1330
1331void ofDirectShowPlayer::setFrame(int frame){
1332if( player && player->isLoaded() ){
1333frame = ofClamp(frame, 0, getTotalNumFrames());
1334return player->setAproximateFrame(frame);
1335}
1336} // frame 0 = first frame...
1337
// Rewind to the very start of the movie.
void ofDirectShowPlayer::firstFrame(){
    setPosition(0.0);
}
1341
1342void ofDirectShowPlayer::nextFrame(){
1343if( player && player->isLoaded() ){
1344player->nextFrame();
1345}
1346}
1347
1348void ofDirectShowPlayer::previousFrame(){
1349if( player && player->isLoaded() ){
1350player->preFrame();
1351}
1352}
1353