/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_capture/windows/sink_filter_ds.h"

#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_capture/windows/help_functions_ds.h"
#include "webrtc/system_wrappers/include/trace.h"

#include <Dvdmedia.h> // VIDEOINFOHEADER2
#include <initguid.h>

#define DELETE_RESET(p) { delete (p); (p) = NULL; }

DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
            0xe2, 0xce, 0x12, 0xc3);

namespace webrtc
{
namespace videocapturemodule
{

typedef struct tagTHREADNAME_INFO
{
    DWORD dwType;     // must be 0x1000
    LPCSTR szName;    // pointer to name (in user addr space)
    DWORD dwThreadID; // thread ID (-1 = caller thread)
    DWORD dwFlags;    // reserved for future use, must be zero
} THREADNAME_INFO;

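// Input pin of the capture sink filter. It negotiates a media type with the
// upstream capture source and forwards every delivered sample to the owning
// CaptureSinkFilter.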
CaptureInputPin::CaptureInputPin (int32_t moduleId,
                                  IN TCHAR * szName,
                                  IN CaptureSinkFilter* pFilter,
                                  IN CCritSec * pLock,
                                  OUT HRESULT * pHr,
                                  IN LPCWSTR pszName)
    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
      _requestedCapability(),
      _resultingCapability()
{
    _moduleId = moduleId;
    _threadHandle = NULL;
}

CaptureInputPin::~CaptureInputPin()
{
}

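// Enumerates the media types this pin offers during connection. The offered
// formats are built from the requested capability (I420, YUY2, RGB24, UYVY
// and MJPEG); positionOffset shifts the enumeration depending on whether a
// specific codec type has been requested.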
HRESULT
CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
{
    // reset the thread handle
    _threadHandle = NULL;

    if(iPosition < 0)
        return E_INVALIDARG;

    VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) pmt->AllocFormatBuffer(
                               sizeof(VIDEOINFOHEADER));
    if(NULL == pvi)
    {
        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
                     "GetMediaType VIDEOINFOHEADER is NULL. Returning...Line:%d\n",
                     __LINE__);
        return(E_OUTOFMEMORY);
    }

    ZeroMemory(pvi, sizeof(VIDEOINFOHEADER));
    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biClrImportant = 0;
    pvi->bmiHeader.biClrUsed = 0;
    if (_requestedCapability.maxFPS != 0) {
        pvi->AvgTimePerFrame = 10000000 / _requestedCapability.maxFPS;
    }

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetTemporalCompression(FALSE);

    int32_t positionOffset = 1;
    if(_requestedCapability.codecType != kVideoCodecUnknown)
    {
        positionOffset = 0;
    }

    switch (iPosition + positionOffset)
    {
        case 0:
        {
            pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
            pvi->bmiHeader.biBitCount = 12; // bits per pixel
            pvi->bmiHeader.biWidth = _requestedCapability.width;
            pvi->bmiHeader.biHeight = _requestedCapability.height;
            pvi->bmiHeader.biSizeImage = 3 * _requestedCapability.height
                                         * _requestedCapability.width / 2;
            pmt->SetSubtype(&MEDIASUBTYPE_I420);
        }
        break;
        case 1:
        {
            pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
            pvi->bmiHeader.biBitCount = 16; // bits per pixel
            pvi->bmiHeader.biWidth = _requestedCapability.width;
            pvi->bmiHeader.biHeight = _requestedCapability.height;
            pvi->bmiHeader.biSizeImage = 2 * _requestedCapability.width
                                         * _requestedCapability.height;
            pmt->SetSubtype(&MEDIASUBTYPE_YUY2);
        }
        break;
        case 2:
        {
            pvi->bmiHeader.biCompression = BI_RGB;
            pvi->bmiHeader.biBitCount = 24; // bits per pixel
            pvi->bmiHeader.biWidth = _requestedCapability.width;
            pvi->bmiHeader.biHeight = _requestedCapability.height;
            pvi->bmiHeader.biSizeImage = 3 * _requestedCapability.height
                                         * _requestedCapability.width;
            pmt->SetSubtype(&MEDIASUBTYPE_RGB24);
        }
        break;
        case 3:
        {
            pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
            pvi->bmiHeader.biBitCount = 16; // bits per pixel
            pvi->bmiHeader.biWidth = _requestedCapability.width;
            pvi->bmiHeader.biHeight = _requestedCapability.height;
            pvi->bmiHeader.biSizeImage = 2 * _requestedCapability.height
                                         * _requestedCapability.width;
            pmt->SetSubtype(&MEDIASUBTYPE_UYVY);
        }
        break;
        case 4:
        {
            pvi->bmiHeader.biCompression = MAKEFOURCC('M','J','P','G');
            pvi->bmiHeader.biBitCount = 12; // bits per pixel
            pvi->bmiHeader.biWidth = _requestedCapability.width;
            pvi->bmiHeader.biHeight = _requestedCapability.height;
            pvi->bmiHeader.biSizeImage = 3 * _requestedCapability.height
                                         * _requestedCapability.width / 2;
            pmt->SetSubtype(&MEDIASUBTYPE_MJPG);
        }
        break;
        default:
            return VFW_S_NO_MORE_ITEMS;
    }
    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
                 "GetMediaType position %d, width %d, height %d, biCompression 0x%x",
                 iPosition, _requestedCapability.width,
                 _requestedCapability.height, pvi->bmiHeader.biCompression);
    return NOERROR;
}

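// Validates a media type proposed by the upstream filter. On success the
// accepted width, height and raw video type are stored in
// _resultingCapability, which is later passed along with every frame
// delivered through Receive(). Both VIDEOINFOHEADER and VIDEOINFOHEADER2
// formats are handled.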
HRESULT
CaptureInputPin::CheckMediaType (IN const CMediaType * pMediaType)
{
    // reset the thread handle
    _threadHandle = NULL;

    const GUID *type = pMediaType->Type();
    if (*type != MEDIATYPE_Video)
        return E_INVALIDARG;

    const GUID *formatType = pMediaType->FormatType();

    // Check for the subtypes we support
    const GUID *SubType = pMediaType->Subtype();
    if (SubType == NULL)
    {
        return E_INVALIDARG;
    }

    if(*formatType == FORMAT_VideoInfo)
    {
        VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pMediaType->Format();
        if(pvi == NULL)
        {
            return E_INVALIDARG;
        }

        // Store the incoming width and height
        _resultingCapability.width = pvi->bmiHeader.biWidth;

        // Store the incoming height,
        // for RGB24 we assume the frame to be upside down
        if(*SubType == MEDIASUBTYPE_RGB24
            && pvi->bmiHeader.biHeight > 0)
        {
            _resultingCapability.height = -(pvi->bmiHeader.biHeight);
        }
        else
        {
            _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
        }

        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
                     pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight,
                     pvi->bmiHeader.biCompression);

        if(*SubType == MEDIASUBTYPE_MJPG
            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
        {
            _resultingCapability.rawType = kVideoMJPEG;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_I420
            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
        {
            _resultingCapability.rawType = kVideoI420;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_YUY2
            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
        {
            _resultingCapability.rawType = kVideoYUY2;
            ::Sleep(60); // workaround for bad driver
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_UYVY
            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
        {
            _resultingCapability.rawType = kVideoUYVY;
            return S_OK; // This format is acceptable.
        }

        if(*SubType == MEDIASUBTYPE_HDYC)
        {
            _resultingCapability.rawType = kVideoUYVY;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_RGB24
            && pvi->bmiHeader.biCompression == BI_RGB)
        {
            _resultingCapability.rawType = kVideoRGB24;
            return S_OK; // This format is acceptable.
        }
    }
    if(*formatType == FORMAT_VideoInfo2)
    {
        // VIDEOINFOHEADER2 that has dwInterlaceFlags
        VIDEOINFOHEADER2 *pvi = (VIDEOINFOHEADER2 *) pMediaType->Format();

        if(pvi == NULL)
        {
            return E_INVALIDARG;
        }

        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
                     pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight,
                     pvi->bmiHeader.biCompression);

        _resultingCapability.width = pvi->bmiHeader.biWidth;

        // Store the incoming height,
        // for RGB24 we assume the frame to be upside down
        if(*SubType == MEDIASUBTYPE_RGB24
            && pvi->bmiHeader.biHeight > 0)
        {
            _resultingCapability.height = -(pvi->bmiHeader.biHeight);
        }
        else
        {
            _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
        }

        if(*SubType == MEDIASUBTYPE_MJPG
            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
        {
            _resultingCapability.rawType = kVideoMJPEG;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_I420
            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
        {
            _resultingCapability.rawType = kVideoI420;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_YUY2
            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
        {
            _resultingCapability.rawType = kVideoYUY2;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_UYVY
            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
        {
            _resultingCapability.rawType = kVideoUYVY;
            return S_OK; // This format is acceptable.
        }

        if(*SubType == MEDIASUBTYPE_HDYC)
        {
            _resultingCapability.rawType = kVideoUYVY;
            return S_OK; // This format is acceptable.
        }
        if(*SubType == MEDIASUBTYPE_RGB24
            && pvi->bmiHeader.biCompression == BI_RGB)
        {
            _resultingCapability.rawType = kVideoRGB24;
            return S_OK; // This format is acceptable.
        }
    }
    return E_INVALIDARG;
}

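// Called by the upstream filter for every captured sample. When no thread
// handle is cached the priority of the delivering thread is raised and the
// thread is named; each accepted sample is then handed to the owning filter
// together with the negotiated capability. On success the receive lock taken
// here is released inside ProcessCapturedFrame(); on failure it is released
// before returning.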
HRESULT
CaptureInputPin::Receive (IN IMediaSample * pIMediaSample)
{
    HRESULT hr = S_OK;

    ASSERT (m_pFilter);
    ASSERT (pIMediaSample);

    // get the thread handle of the delivering thread and increase its priority
    if( _threadHandle == NULL)
    {
        HANDLE handle = GetCurrentThread();
        SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
        _threadHandle = handle;

        rtc::SetCurrentThreadName("webrtc_video_capture");
    }

    reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
    hr = CBaseInputPin::Receive (pIMediaSample);

    if (SUCCEEDED (hr))
    {
        const LONG length = pIMediaSample->GetActualDataLength();
        ASSERT(length >= 0);

        unsigned char* pBuffer = NULL;
        if(S_OK != pIMediaSample->GetPointer(&pBuffer))
        {
            reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
            return S_FALSE;
        }

        // NOTE: filter unlocked within ProcessCapturedFrame call
        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
            pBuffer, static_cast<size_t>(length), _resultingCapability);
    }
    else
    {
        reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
    }

    return hr;
}

// called under LockReceive
HRESULT CaptureInputPin::SetMatchingMediaType(
    const VideoCaptureCapability& capability)
{
    _requestedCapability = capability;
    _resultingCapability = VideoCaptureCapability();
    return S_OK;
}
// ----------------------------------------------------------------------------
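// Sink filter that exposes a single input pin and forwards captured frames
// to the VideoCaptureExternal observer passed to the constructor.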
CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
                                      IN LPUNKNOWN punk,
                                      OUT HRESULT * phr,
                                      VideoCaptureExternal& captureObserver,
                                      int32_t moduleId)
    : CBaseFilter(tszName, punk, &m_crtFilter, CLSID_SINKFILTER),
      m_pInput(NULL),
      _captureObserver(captureObserver),
      _moduleId(moduleId)
{
    (*phr) = S_OK;
    m_pInput = new CaptureInputPin(moduleId, NAME("VideoCaptureInputPin"),
                                   this,
                                   &m_crtFilter,
                                   phr, L"VideoCapture");
    if (m_pInput == NULL || FAILED(*phr))
    {
        (*phr) = FAILED(*phr) ? (*phr) : E_OUTOFMEMORY;
        goto cleanup;
    }
cleanup:
    return;
}

CaptureSinkFilter::~CaptureSinkFilter()
{
    delete m_pInput;
}

int CaptureSinkFilter::GetPinCount()
{
    return 1;
}

CBasePin *
CaptureSinkFilter::GetPin(IN int Index)
{
    CBasePin * pPin;
    LockFilter ();
    if (Index == 0)
    {
        pPin = m_pInput;
    }
    else
    {
        pPin = NULL;
    }
    UnlockFilter ();
    return pPin;
}

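// Transitions the filter to State_Paused. When leaving State_Stopped the
// state is changed before the input pin is activated; if the pin is not
// connected the filter reports itself as State_Running directly.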
STDMETHODIMP CaptureSinkFilter::Pause()
{
    LockReceive();
    LockFilter();
    if (m_State == State_Stopped)
    {
        // change the state, THEN activate the input pin
        m_State = State_Paused;
        if (m_pInput && m_pInput->IsConnected())
        {
            m_pInput->Active();
        }
        if (m_pInput && !m_pInput->IsConnected())
        {
            m_State = State_Running;
        }
    }
    else if (m_State == State_Running)
    {
        m_State = State_Paused;
    }
    UnlockFilter();
    UnlockReceive();
    return S_OK;
}

STDMETHODIMP CaptureSinkFilter::Stop()
{
    LockReceive();
    LockFilter();

    // set the state
    m_State = State_Stopped;

    // inactivate the pins
    if (m_pInput)
        m_pInput->Inactive();

    UnlockFilter();
    UnlockReceive();
    return S_OK;
}

void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
{
    LockFilter();
    m_pGraph = graph;
    UnlockFilter();
}

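// Called from CaptureInputPin::Receive() with the receive lock held. The
// frame is delivered to the capture observer only while the filter is
// running; the receive lock is released on every path before returning.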
void CaptureSinkFilter::ProcessCapturedFrame(
    unsigned char* pBuffer,
    size_t length,
    const VideoCaptureCapability& frameInfo)
{
    // we have the receiver lock
    if (m_State == State_Running)
    {
        _captureObserver.IncomingFrame(pBuffer, length, frameInfo);

        // the lock is held across the callback since it is only a memcpy
        // IMPROVEMENT: if this works, move the critsect
        UnlockReceive();
        return;
    }
    UnlockReceive();
    return;
}

STDMETHODIMP CaptureSinkFilter::SetMatchingMediaType(
    const VideoCaptureCapability& capability)
{
    LockReceive();
    LockFilter();
    HRESULT hr;
    if (m_pInput)
    {
        hr = m_pInput->SetMatchingMediaType(capability);
    }
    else
    {
        hr = E_UNEXPECTED;
    }
    UnlockFilter();
    UnlockReceive();
    return hr;
}

STDMETHODIMP CaptureSinkFilter::GetClassID( OUT CLSID * pCLSID )
{
    (*pCLSID) = CLSID_SINKFILTER;
    return S_OK;
}

}  // namespace videocapturemodule
}  // namespace webrtc