GitHub Repository: hrydgard/ppsspp
Path: blob/master/Windows/CaptureDevice.cpp
// Copyright (c) 2020- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include <shlwapi.h>
#include <wrl/client.h>

#include "Common/Thread/ThreadUtil.h"
#include "CaptureDevice.h"
#include "BufferLock.h"
#include "ext/jpge/jpge.h"
#include "CommonTypes.h"
#include "Core/HLE/sceUsbCam.h"
#include "Core/Config.h"

using Microsoft::WRL::ComPtr;

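// The Media Foundation entry points below are resolved at runtime with
// LoadLibrary/GetProcAddress (see RegisterCMPTMFApis) rather than being linked
// directly, so the binary can still start on systems without these DLLs.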
namespace MFAPI {
    HINSTANCE Mflib;
    HINSTANCE Mfplatlib;
    HINSTANCE Mfreadwritelib;

    typedef HRESULT(WINAPI *MFEnumDeviceSourcesFunc)(IMFAttributes *, IMFActivate ***, UINT32 *);
    typedef HRESULT(WINAPI *MFGetStrideForBitmapInfoHeaderFunc)(DWORD, DWORD, LONG *);
    typedef HRESULT(WINAPI *MFCreateSourceReaderFromMediaSourceFunc)(IMFMediaSource *, IMFAttributes *, IMFSourceReader **);
    typedef HRESULT(WINAPI *MFCopyImageFunc)(BYTE *, LONG, const BYTE *, LONG, DWORD, DWORD);

    MFEnumDeviceSourcesFunc EnumDeviceSources;
    MFGetStrideForBitmapInfoHeaderFunc GetStrideForBitmapInfoHeader;
    MFCreateSourceReaderFromMediaSourceFunc CreateSourceReaderFromMediaSource;
    MFCopyImageFunc CopyImage;
}

using namespace MFAPI;

bool RegisterCMPTMFApis() {
    // For compatibility: these functions are not available on Vista, so bind them at runtime.
    Mflib = LoadLibrary(L"Mf.dll");
    Mfplatlib = LoadLibrary(L"Mfplat.dll");
    Mfreadwritelib = LoadLibrary(L"Mfreadwrite.dll");
    if (!Mflib || !Mfplatlib || !Mfreadwritelib)
        return false;

    EnumDeviceSources = (MFEnumDeviceSourcesFunc)GetProcAddress(Mflib, "MFEnumDeviceSources");
    GetStrideForBitmapInfoHeader = (MFGetStrideForBitmapInfoHeaderFunc)GetProcAddress(Mfplatlib, "MFGetStrideForBitmapInfoHeader");
    MFAPI::CopyImage = (MFCopyImageFunc)GetProcAddress(Mfplatlib, "MFCopyImage");
    CreateSourceReaderFromMediaSource = (MFCreateSourceReaderFromMediaSourceFunc)GetProcAddress(Mfreadwritelib, "MFCreateSourceReaderFromMediaSource");
    if (!EnumDeviceSources || !GetStrideForBitmapInfoHeader || !CreateSourceReaderFromMediaSource || !MFAPI::CopyImage)
        return false;

    return true;
}

bool unRegisterCMPTMFApis() {
    if (Mflib) {
        FreeLibrary(Mflib);
        Mflib = nullptr;
    }

    if (Mfplatlib) {
        FreeLibrary(Mfplatlib);
        Mfplatlib = nullptr;
    }

    if (Mfreadwritelib) {
        FreeLibrary(Mfreadwritelib);
        Mfreadwritelib = nullptr;
    }

    EnumDeviceSources = nullptr;
    GetStrideForBitmapInfoHeader = nullptr;
    CreateSourceReaderFromMediaSource = nullptr;
    MFAPI::CopyImage = nullptr;

    return true;
}

WindowsCaptureDevice *winCamera;
WindowsCaptureDevice *winMic;

// TODO: Add more formats; that needs some testing first.
VideoFormatTransform g_VideoFormats[] =
{
    { MFVideoFormat_RGB32, AV_PIX_FMT_RGBA },
    { MFVideoFormat_RGB24, AV_PIX_FMT_RGB24 },
    { MFVideoFormat_YUY2, AV_PIX_FMT_YUYV422 },
    { MFVideoFormat_NV12, AV_PIX_FMT_NV12 }
};

AudioFormatTransform g_AudioFormats[] = {
    { MFAudioFormat_PCM, 8, AV_SAMPLE_FMT_U8 },
    { MFAudioFormat_PCM, 16, AV_SAMPLE_FMT_S16 },
    { MFAudioFormat_PCM, 32, AV_SAMPLE_FMT_S32 },
    { MFAudioFormat_Float, 32, AV_SAMPLE_FMT_FLT }
};

const int g_cVideoFormats = ARRAYSIZE(g_VideoFormats);
const int g_cAudioFormats = ARRAYSIZE(g_AudioFormats);

MediaParam defaultVideoParam = { { 640, 480, 0, MFVideoFormat_RGB24 } };
MediaParam defaultAudioParam = { { 44100, 2, 16, MFAudioFormat_PCM } };

HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride);

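// ReaderCallback implements IMFSourceReaderCallback for a WindowsCaptureDevice;
// the source reader invokes OnReadSample below on its own worker thread whenever
// a requested sample (or an error) is ready.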
ReaderCallback::ReaderCallback(WindowsCaptureDevice *_device) : device(_device) {}

ReaderCallback::~ReaderCallback() {
#ifdef USE_FFMPEG
    if (img_convert_ctx) {
        sws_freeContext(img_convert_ctx);
    }
    if (resample_ctx) {
        swr_free(&resample_ctx);
    }
#endif
}

HRESULT ReaderCallback::QueryInterface(REFIID riid, void** ppv)
{
    static const QITAB qit[] =
    {
        QITABENT(ReaderCallback, IMFSourceReaderCallback),
        { 0 },
    };
    return QISearch(this, qit, riid, ppv);
}

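// OnReadSample handles one completed ReadSample request. Under the shutdown mutex it
// grabs the sample's first buffer, then for video converts the frame to RGB24, optionally
// mirrors it, JPEG-compresses it and hands it to Camera::pushCameraImage; for audio it
// resamples if needed and forwards the data via Microphone::addAudioData. In both cases
// it immediately queues the next ReadSample so capture keeps running.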
HRESULT ReaderCallback::OnReadSample(
        HRESULT hrStatus,
        DWORD dwStreamIndex,
        DWORD dwStreamFlags,
        LONGLONG llTimestamp,
        IMFSample *pSample) {
    HRESULT hr = S_OK;
    ComPtr<IMFMediaBuffer> pBuffer;
    std::lock_guard<std::mutex> lock(device->sdMutex);
    if (device->isShutDown())
        return hr;

    if (FAILED(hrStatus))
        hr = hrStatus;

    if (SUCCEEDED(hr)) {
        if (pSample) {
            hr = pSample->GetBufferByIndex(0, &pBuffer);
        }
    }
    if (SUCCEEDED(hr)) {
        switch (device->type) {
        case CAPTUREDEVIDE_TYPE::VIDEO: {
            BYTE *pbScanline0 = nullptr;
            VideoBufferLock *videoBuffer = nullptr;
            int imgJpegSize = device->imgJpegSize;
            unsigned char *invertedSrcImg = nullptr;
            LONG srcPadding = 0;
            LONG lStride = 0;

            UINT32 srcW = device->deviceParam.width;
            UINT32 srcH = device->deviceParam.height;
            UINT32 dstW = device->targetMediaParam.width;
            UINT32 dstH = device->targetMediaParam.height;
            GUID srcMFVideoFormat = device->deviceParam.videoFormat;

            // pSample can be null; in that case ReadSample should still be called to request the next frame.
            if (pSample) {
                videoBuffer = new VideoBufferLock(pBuffer.Get());
                hr = videoBuffer->LockBuffer(device->deviceParam.default_stride, device->deviceParam.height, &pbScanline0, &lStride);

                if (lStride > 0)
                    srcPadding = lStride - device->deviceParam.default_stride;
                else
                    srcPadding = device->deviceParam.default_stride - lStride;

#ifdef USE_FFMPEG
                if (SUCCEEDED(hr)) {
                    // Convert the image to RGB24.
                    if (lStride > 0) {
                        imgConvert(
                            device->imageRGB, dstW, dstH, device->imgRGBLineSizes,
                            pbScanline0, srcW, srcH, srcMFVideoFormat, srcPadding);
                    } else {
                        // If stride < 0, the pointer to the first row of the source image is the last row in memory,
                        // so the image has to be inverted in memory first.
                        invertedSrcImg = (unsigned char *)av_malloc(av_image_get_buffer_size(getAVVideoFormatbyMFVideoFormat(srcMFVideoFormat), srcW, srcH, 1));
                        imgInvert(invertedSrcImg, pbScanline0, srcW, srcH, device->deviceParam.videoFormat, lStride);
                        // The inverted image is allocated with no padding, so set padding to zero.
                        srcPadding = 0;
                        imgConvert(
                            device->imageRGB, dstW, dstH, device->imgRGBLineSizes,
                            invertedSrcImg, srcW, srcH, srcMFVideoFormat, srcPadding);
                        av_free(invertedSrcImg);
                    }

                    // Mirror the image in-place if needed.
                    if (g_Config.bCameraMirrorHorizontal) {
                        for (int y = 0; y < (int)dstH; y++) {
                            uint8_t *line = device->imageRGB + y * device->imgRGBLineSizes[0];
                            for (int x = 0; x < (int)dstW / 2; x++) {
                                const int invX = dstW - 1 - x;
                                const uint8_t r = line[x * 3 + 0];
                                const uint8_t g = line[x * 3 + 1];
                                const uint8_t b = line[x * 3 + 2];
                                line[x * 3 + 0] = line[invX * 3 + 0];
                                line[x * 3 + 1] = line[invX * 3 + 1];
                                line[x * 3 + 2] = line[invX * 3 + 2];
                                line[invX * 3 + 0] = r;
                                line[invX * 3 + 1] = g;
                                line[invX * 3 + 2] = b;
                            }
                        }
                    }

                    // Compress the RGB24 image to JPEG.
                    jpge::compress_image_to_jpeg_file_in_memory(
                        device->imageJpeg, imgJpegSize,
                        dstW,
                        dstH,
                        3,
                        device->imageRGB);
                }
#endif
                Camera::pushCameraImage(imgJpegSize, device->imageJpeg);
            }
            // Request the next frame.
            if (SUCCEEDED(hr)) {
                hr = device->m_pReader->ReadSample(
                    (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                    0,
                    nullptr,
                    nullptr,
                    nullptr,
                    nullptr
                );
            }
            delete videoBuffer;
            break;
        }
        case CAPTUREDEVIDE_TYPE::Audio: {
            BYTE *sampleBuf = nullptr;
            DWORD length = 0;
            u32 sizeAfterResample = 0;
            // pSample can be null; in that case ReadSample should still be called to request the next frame.
            if (pSample) {
                pBuffer->Lock(&sampleBuf, nullptr, &length);
                if (device->needResample()) {
                    sizeAfterResample = doResample(
                        &device->resampleBuf, device->targetMediaParam.sampleRate, device->targetMediaParam.channels, &device->resampleBufSize,
                        sampleBuf, device->deviceParam.sampleRate, device->deviceParam.channels, device->deviceParam.audioFormat, length, device->deviceParam.bitsPerSample);
                    if (device->resampleBuf)
                        Microphone::addAudioData(device->resampleBuf, sizeAfterResample);
                } else {
                    Microphone::addAudioData(sampleBuf, length);
                }
                pBuffer->Unlock();
            }
            // Request the next frame.
            if (SUCCEEDED(hr)) {
                hr = device->m_pReader->ReadSample(
                    (DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                    0,
                    nullptr,
                    nullptr,
                    nullptr,
                    nullptr
                );
            }
            break;
        }
        }
    }

    return hr;
}

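// Translate a Media Foundation subtype into the matching FFmpeg pixel/sample format
// using the tables above, falling back to RGB24 / S16 when the subtype is unknown.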
AVPixelFormat ReaderCallback::getAVVideoFormatbyMFVideoFormat(const GUID &MFVideoFormat) {
    for (int i = 0; i < g_cVideoFormats; i++) {
        if (MFVideoFormat == g_VideoFormats[i].MFVideoFormat)
            return g_VideoFormats[i].AVVideoFormat;
    }
    return AV_PIX_FMT_RGB24;
}

AVSampleFormat ReaderCallback::getAVAudioFormatbyMFAudioFormat(const GUID &MFAudioFormat, const u32 &bitsPerSample) {
    for (int i = 0; i < g_cAudioFormats; i++) {
        if (MFAudioFormat == g_AudioFormats[i].MFAudioFormat && bitsPerSample == g_AudioFormats[i].bitsPerSample)
            return g_AudioFormats[i].AVAudioFormat;
    }
    return AV_SAMPLE_FMT_S16;
}

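// imgConvert uses libswscale to scale/convert the captured frame into the RGB24 buffer
// that the JPEG encoder consumes. The SwsContext is created on first use and cached in
// img_convert_ctx; srcPadding widens the source line sizes when the driver reports a
// stride larger than the packed width.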
void ReaderCallback::imgConvert(
        unsigned char *dst, unsigned int &dstW, unsigned int &dstH, int dstLineSizes[4],
        unsigned char *src, const unsigned int &srcW, const unsigned int &srcH, const GUID &srcFormat,
        const int &srcPadding) {
#ifdef USE_FFMPEG
    int srcLineSizes[4] = { 0, 0, 0, 0 };
    unsigned char *pSrc[4];
    unsigned char *pDst[4];

    AVPixelFormat srcAVFormat = getAVVideoFormatbyMFVideoFormat(srcFormat);

    av_image_fill_linesizes(srcLineSizes, srcAVFormat, srcW);

    // Is this correct?
    if (srcPadding != 0) {
        for (int i = 0; i < 4; i++) {
            if (srcLineSizes[i] != 0)
                srcLineSizes[i] += srcPadding;
        }
    }

    av_image_fill_pointers(pSrc, srcAVFormat, srcH, src, srcLineSizes);
    av_image_fill_pointers(pDst, AV_PIX_FMT_RGB24, dstH, dst, dstLineSizes);

    if (img_convert_ctx == nullptr) {
        img_convert_ctx = sws_getContext(
            srcW,
            srcH,
            srcAVFormat,
            dstW,
            dstH,
            AV_PIX_FMT_RGB24,
            SWS_BICUBIC,
            nullptr,
            nullptr,
            nullptr
        );
    }

    if (img_convert_ctx) {
        sws_scale(img_convert_ctx,
            (const uint8_t *const *)pSrc,
            srcLineSizes,
            0,
            srcH,
            (uint8_t *const *)pDst,
            dstLineSizes
        );
    }
#endif
}

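// The imgInvert* helpers copy a frame that is stored bottom-up in memory (reported with a
// negative stride) into a tightly packed buffer, row by row, so imgConvert can treat it as
// a normal top-down image. RGB32 frames go through MFCopyImage; the other formats are
// copied manually.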
void ReaderCallback::imgInvert(unsigned char *dst, unsigned char *src, const int &srcW, const int &srcH, const GUID &srcFormat, const int &srcStride) {
#ifdef USE_FFMPEG
    AVPixelFormat srcAvFormat = getAVVideoFormatbyMFVideoFormat(srcFormat);
    int dstLineSizes[4] = { 0, 0, 0, 0 };

    av_image_fill_linesizes(dstLineSizes, srcAvFormat, srcW);

    if (srcFormat == MFVideoFormat_RGB32)
        imgInvertRGBA(dst, dstLineSizes[0], src, srcStride, srcH);
    else if (srcFormat == MFVideoFormat_RGB24)
        imgInvertRGB(dst, dstLineSizes[0], src, srcStride, srcH);
    else if (srcFormat == MFVideoFormat_YUY2)
        imgInvertYUY2(dst, dstLineSizes[0], src, srcStride, srcH);
    else if (srcFormat == MFVideoFormat_NV12)
        imgInvertNV12(dst, dstLineSizes[0], src, srcStride, srcH);
#endif
}

void ReaderCallback::imgInvertRGBA(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
    MFAPI::CopyImage(dst, dstStride, src, srcStride, dstStride, h);
}

void ReaderCallback::imgInvertRGB(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
    for (int y = 0; y < h; y++) {
        for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
            dst[dstx] = src[srcx];
        }
        dst += dstStride;
        src += srcStride;
    }
}

void ReaderCallback::imgInvertYUY2(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
    for (int y = 0; y < h; y++) {
        for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
            dst[dstx] = src[srcx];
        }
        dst += dstStride;
        src += srcStride;
    }
}

void ReaderCallback::imgInvertNV12(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
    unsigned char *dstY = dst;
    unsigned char *dstU = dst + dstStride * h;
    unsigned char *srcY = src;
    unsigned char *srcV = src + srcStride * h;

    unsigned char *srcY1 = srcY;
    unsigned char *srcY2 = srcY1 + srcStride;
    unsigned char *dstY1 = dstY;
    unsigned char *dstY2 = dstY1 + dstStride;

    bool isodd = h % 2 != 0;

    for (int y = 0; y < (isodd ? h - 1 : h); y += 2) {
        for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
            dstY1[dstx] = srcY1[srcx];
            dstY2[dstx] = srcY2[srcx];
            dstU[dstx] = srcV[srcx];
        }
        // Advance the destination Y row pointers in step with the source rows.
        dstY1 += dstStride * 2;
        dstY2 += dstStride * 2;
        srcY1 += srcStride * 2;
        srcY2 += srcStride * 2;
        srcV += srcStride;
        dstU += dstStride;
    }
}

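// doResample converts one captured audio buffer to interleaved 16-bit PCM at the target
// sample rate and channel count using libswresample. The SwrContext is created lazily on
// the first call, and *dst is grown with av_fast_malloc when the converted data would not
// fit. Returns the number of output bytes, or 0 on failure.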
u32 ReaderCallback::doResample(u8 **dst, u32 &dstSampleRate, u32 &dstChannels, u32 *dstSize, u8 *src, const u32 &srcSampleRate, const u32 &srcChannels, const GUID &srcFormat, const u32 &srcSize, const u32 &srcBitsPerSample) {
#ifdef USE_FFMPEG
    AVSampleFormat srcAVFormat = getAVAudioFormatbyMFAudioFormat(srcFormat, srcBitsPerSample);
    int outSamplesCount = 0;
    if (resample_ctx == nullptr) {
        resample_ctx = swr_alloc_set_opts(nullptr,
            av_get_default_channel_layout(dstChannels),
            AV_SAMPLE_FMT_S16,
            dstSampleRate,
            av_get_default_channel_layout(srcChannels),
            srcAVFormat,
            srcSampleRate,
            0,
            nullptr);
        if (resample_ctx == nullptr || swr_init(resample_ctx) < 0) {
            swr_free(&resample_ctx);
            return 0;
        }
    }
    int srcSamplesCount = srcSize / srcChannels / av_get_bytes_per_sample(srcAVFormat); // per channel.
    int outCount = srcSamplesCount * dstSampleRate / srcSampleRate + 256;
    unsigned int outSize = av_samples_get_buffer_size(nullptr, dstChannels, outCount, AV_SAMPLE_FMT_S16, 0);

    if (!*dst) {
        *dst = (u8 *)av_malloc(outSize);
        *dstSize = outSize;
    }
    if (!*dst)
        return 0;

    if (*dstSize < outSize)
        av_fast_malloc(dst, dstSize, outSize);

    outSamplesCount = swr_convert(resample_ctx, dst, outCount, (const uint8_t **)&src, srcSamplesCount);
    if (outSamplesCount < 0)
        return 0;
    return av_samples_get_buffer_size(nullptr, dstChannels, outSamplesCount, AV_SAMPLE_FMT_S16, 0);
#else
    return 0;
#endif
}

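// Each WindowsCaptureDevice owns a dedicated worker thread (messageHandler) that performs
// all COM/Media Foundation work. The constructor only fills in default parameters and
// detaches that thread; everything else is driven through the message queue.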
WindowsCaptureDevice::WindowsCaptureDevice(CAPTUREDEVIDE_TYPE _type) :
    type(_type),
    error(CAPTUREDEVIDE_ERROR_NO_ERROR),
    state(CAPTUREDEVIDE_STATE::UNINITIALIZED) {
    param = { 0 };
    deviceParam = { { 0 } };

    switch (type) {
    case CAPTUREDEVIDE_TYPE::VIDEO:
        targetMediaParam = defaultVideoParam;
        break;
    case CAPTUREDEVIDE_TYPE::Audio:
        targetMediaParam = defaultAudioParam;
        break;
    }

    std::thread t(&WindowsCaptureDevice::messageHandler, this);
    t.detach();
}

WindowsCaptureDevice::~WindowsCaptureDevice() {
#ifdef USE_FFMPEG
    switch (type) {
    case CAPTUREDEVIDE_TYPE::VIDEO:
        av_freep(&imageRGB);
        av_freep(&imageJpeg);
        break;
    case CAPTUREDEVIDE_TYPE::Audio:
        av_freep(&resampleBuf);
        break;
    }
#endif
}

void WindowsCaptureDevice::CheckDevices() {
    isDeviceChanged = true;
}

bool WindowsCaptureDevice::init() {
    HRESULT hr = S_OK;

    if (!RegisterCMPTMFApis()) {
        setError(CAPTUREDEVIDE_ERROR_INIT_FAILED, "Cannot register devices");
        return false;
    }
    std::unique_lock<std::mutex> lock(paramMutex);
    hr = enumDevices();
    lock.unlock();

    if (FAILED(hr)) {
        setError(CAPTUREDEVIDE_ERROR_INIT_FAILED, "Cannot enumerate devices");
        return false;
    }

    updateState(CAPTUREDEVIDE_STATE::STOPPED);
    return true;
}

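// start() re-enumerates the devices, activates the one selected in the config, creates an
// asynchronous source reader with this device's ReaderCallback, lets setDeviceParam pick a
// usable native media type, and then issues the first ReadSample. startParam optionally
// carries the requested resolution (video) or sample rate/channel count (audio) and is
// deleted here.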
bool WindowsCaptureDevice::start(void *startParam) {
    HRESULT hr = S_OK;
    ComPtr<IMFAttributes> pAttributes;
    ComPtr<IMFMediaType> pType;
    UINT32 selection = 0;
    UINT32 count = 0;

    // Release the old sources first (if any).
    m_pSource = nullptr;
    m_pReader = nullptr;
    m_pCallback = nullptr;
    // Need to re-enumerate the list, because the old sources have been released.
    std::vector<std::string> deviceList = getDeviceList(true);

    if (deviceList.size() < 1) {
        setError(CAPTUREDEVIDE_ERROR_START_FAILED, "No device found");
        return false;
    }

    m_pCallback = new ReaderCallback(this);

    std::string selectedDeviceName = type == CAPTUREDEVIDE_TYPE::VIDEO ? g_Config.sCameraDevice : g_Config.sMicDevice;

    switch (state) {
    case CAPTUREDEVIDE_STATE::STOPPED:
        for (auto &name : deviceList) {
            if (name == selectedDeviceName) {
                selection = count;
                break;
            }
            ++count;
        }
        setSelection(selection);
        hr = param.ppDevices[param.selection]->ActivateObject(
            IID_PPV_ARGS(&m_pSource));

        if (SUCCEEDED(hr))
            hr = MFCreateAttributes(&pAttributes, 2);

        // Use async mode.
        if (SUCCEEDED(hr))
            hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, m_pCallback.Get());

        if (SUCCEEDED(hr))
            hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);

        if (SUCCEEDED(hr)) {
            hr = CreateSourceReaderFromMediaSource(
                m_pSource.Get(),
                pAttributes.Get(),
                &m_pReader
            );
        }

        if (!m_pReader)
            hr = E_FAIL;

        if (SUCCEEDED(hr)) {
            switch (type) {
            case CAPTUREDEVIDE_TYPE::VIDEO: {
                if (startParam) {
                    std::vector<int> *resolution = static_cast<std::vector<int>*>(startParam);
                    targetMediaParam.width = resolution->at(0);
                    targetMediaParam.height = resolution->at(1);
                    delete resolution;
                }
#ifdef USE_FFMPEG

                av_freep(&imageRGB);
                av_freep(&imageJpeg);
                imageRGB = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB24, targetMediaParam.width, targetMediaParam.height, 1));
                imgJpegSize = av_image_get_buffer_size(AV_PIX_FMT_YUVJ411P, targetMediaParam.width, targetMediaParam.height, 1);
                imageJpeg = (unsigned char *)av_malloc(imgJpegSize);
                av_image_fill_linesizes(imgRGBLineSizes, AV_PIX_FMT_RGB24, targetMediaParam.width);
#endif

                for (DWORD i = 0; ; i++) {
                    hr = m_pReader->GetNativeMediaType(
                        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                        i,
                        &pType
                    );

                    if (FAILED(hr)) { break; }

                    hr = setDeviceParam(pType.Get());

                    if (SUCCEEDED(hr))
                        break;
                }
                /*
                hr = m_pReader->GetNativeMediaType(
                    (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                    (DWORD)0xFFFFFFFF, // MF_SOURCE_READER_CURRENT_TYPE_INDEX
                    &pType
                );
                if (SUCCEEDED(hr))
                    hr = setDeviceParam(pType); */ // Not supported on Win7.

                // Request the first frame; in async mode, OnReadSample will be called when ReadSample completes.
                if (SUCCEEDED(hr)) {
                    hr = m_pReader->ReadSample(
                        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                        0,
                        nullptr,
                        nullptr,
                        nullptr,
                        nullptr
                    );
                }
                break;
            }

            case CAPTUREDEVIDE_TYPE::Audio: {
                if (startParam) {
                    std::vector<u32> *micParam = static_cast<std::vector<u32>*>(startParam);
                    targetMediaParam.sampleRate = micParam->at(0);
                    targetMediaParam.channels = micParam->at(1);
                    delete micParam;
                }

                for (DWORD i = 0; ; i++) {
                    hr = m_pReader->GetNativeMediaType(
                        (DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                        i,
                        &pType
                    );

                    if (FAILED(hr)) { break; }

                    hr = setDeviceParam(pType.Get());

                    if (SUCCEEDED(hr))
                        break;
                }

                if (SUCCEEDED(hr)) {
                    hr = m_pReader->ReadSample(
                        (DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                        0,
                        nullptr,
                        nullptr,
                        nullptr,
                        nullptr
                    );
                }
                break;
            }
            }
        }

        if (FAILED(hr)) {
            setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Cannot start");
            if (m_pSource)
                m_pSource->Shutdown();
            m_pSource = nullptr;
            m_pReader = nullptr;
            return false;
        }

        updateState(CAPTUREDEVIDE_STATE::STARTED);
        break;
    case CAPTUREDEVIDE_STATE::LOST:
        setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device has been lost");
        return false;
    case CAPTUREDEVIDE_STATE::STARTED:
        setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device has already been started");
        return false;
    case CAPTUREDEVIDE_STATE::UNINITIALIZED:
        setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device has not been initialized");
        return false;
    default:
        break;
    }
    return true;
}

bool WindowsCaptureDevice::stop() {
    if (state == CAPTUREDEVIDE_STATE::STOPPED)
        return true;
    if (m_pSource)
        m_pSource->Stop();

    updateState(CAPTUREDEVIDE_STATE::STOPPED);

    return true;
}

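// getDeviceList returns the UTF-8 friendly names of the enumerated capture devices,
// re-enumerating first when the device list has changed (or when forceEnum is set).
// pActuallCount, when provided, receives the number of names that were retrieved.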
std::vector<std::string> WindowsCaptureDevice::getDeviceList(bool forceEnum, int *pActuallCount) {
    HRESULT hr = S_OK;
    UINT32 count = 0;
    LPWSTR pwstrName = nullptr;
    char *cstrName = nullptr;
    std::string strName;
    DWORD dwMinSize = 0;
    std::vector<std::string> deviceList;

    if (isDeviceChanged || forceEnum) {
        std::unique_lock<std::mutex> lock(paramMutex);
        for (DWORD i = 0; i < param.count; i++) {
            SafeRelease(&param.ppDevices[i]);
        }
        CoTaskMemFree(param.ppDevices); // Null pointer is okay.

        hr = enumDevices();

        lock.unlock();

        if (SUCCEEDED(hr))
            isDeviceChanged = false;
        else
            return deviceList;
    }

    for (; count < param.count; count++) {
        hr = param.ppDevices[count]->GetAllocatedString(
            MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
            &pwstrName,
            nullptr
        );

        if (SUCCEEDED(hr)) {
            // Get the required buffer size first.
            dwMinSize = WideCharToMultiByte(CP_UTF8, 0, pwstrName, -1, nullptr, 0, nullptr, FALSE);
            if (dwMinSize == 0)
                hr = E_FAIL;
        }
        if (SUCCEEDED(hr)) {
            cstrName = new char[dwMinSize];
            WideCharToMultiByte(CP_UTF8, 0, pwstrName, -1, cstrName, dwMinSize, NULL, FALSE);
            strName = cstrName;
            delete[] cstrName;

            deviceList.push_back(strName);
        }

        CoTaskMemFree(pwstrName);

        if (FAILED(hr)) {
            setError(CAPTUREDEVIDE_ERROR_GETNAMES_FAILED, "Error occurred, got " + std::to_string((int)count) + " device names");
            if (pActuallCount)
                *pActuallCount = count;
            return deviceList;
        }
    }
    if (pActuallCount)
        *pActuallCount = count + 1;
    return deviceList;
}

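// setDeviceParam inspects a native media type: if its subtype already maps to one of the
// supported formats it is used as-is, otherwise the reader is asked (via SetCurrentMediaType)
// to deliver one of the supported subtypes. On success the negotiated format plus frame
// size/stride (video) or sample rate/channels/bits per sample (audio) are stored in deviceParam.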
HRESULT WindowsCaptureDevice::setDeviceParam(IMFMediaType *pType) {
    HRESULT hr = S_OK;
    GUID subtype = { 0 };
    bool getFormat = false;

    switch (type) {
    case CAPTUREDEVIDE_TYPE::VIDEO:
        hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (FAILED(hr))
            break;

        for (int i = 0; i < g_cVideoFormats; i++) {
            if (subtype == g_VideoFormats[i].MFVideoFormat) {
                deviceParam.videoFormat = subtype;
                getFormat = true;
                break;
            }
        }

        if (!getFormat) {
            for (int i = 0; i < g_cVideoFormats; i++) {
                hr = pType->SetGUID(MF_MT_SUBTYPE, g_VideoFormats[i].MFVideoFormat);
                if (FAILED(hr))
                    continue;

                hr = m_pReader->SetCurrentMediaType(
                    (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                    NULL,
                    pType
                );

                if (SUCCEEDED(hr)) {
                    deviceParam.videoFormat = g_VideoFormats[i].MFVideoFormat;
                    getFormat = true;
                    break;
                }
            }
        }
        if (SUCCEEDED(hr))
            hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &deviceParam.width, &deviceParam.height);

        if (SUCCEEDED(hr))
            hr = GetDefaultStride(pType, &deviceParam.default_stride);

        break;
    case CAPTUREDEVIDE_TYPE::Audio:
        hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (FAILED(hr))
            break;

        for (int i = 0; i < g_cAudioFormats; i++) {
            if (subtype == g_AudioFormats[i].MFAudioFormat) {
                deviceParam.audioFormat = subtype;
                getFormat = true;
                break;
            }
        }

        if (!getFormat) {
            for (int i = 0; i < g_cAudioFormats; i++) {
                hr = pType->SetGUID(MF_MT_SUBTYPE, g_AudioFormats[i].MFAudioFormat);
                if (FAILED(hr))
                    continue;

                hr = m_pReader->SetCurrentMediaType(
                    (DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                    NULL,
                    pType
                );

                if (SUCCEEDED(hr)) {
                    deviceParam.audioFormat = g_AudioFormats[i].MFAudioFormat;
                    getFormat = true;
                    break;
                }
            }
        }
        if (SUCCEEDED(hr))
            hr = pType->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &deviceParam.sampleRate);

        if (SUCCEEDED(hr))
            hr = pType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &deviceParam.channels);

        if (SUCCEEDED(hr))
            hr = pType->GetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, (UINT32 *)&deviceParam.bitsPerSample);

        break;
    }

    return hr;
}

void WindowsCaptureDevice::sendMessage(CAPTUREDEVIDE_MESSAGE message) {
    // Must be unique lock
    std::unique_lock<std::mutex> lock(mutex);
    messageQueue.push(message);
    cond.notify_one();
}

CAPTUREDEVIDE_MESSAGE WindowsCaptureDevice::getMessage() {
    // Must be unique lock
    std::unique_lock<std::mutex> lock(mutex);
    CAPTUREDEVIDE_MESSAGE message;
    cond.wait(lock, [this]() { return !messageQueue.empty(); });
    message = messageQueue.front();
    messageQueue.pop();

    return message;
}

void WindowsCaptureDevice::updateState(const CAPTUREDEVIDE_STATE &newState) {
    state = newState;
    if (isShutDown()) {
        std::unique_lock<std::mutex> guard(stateMutex_);
        stateCond_.notify_all();
    }
}

void WindowsCaptureDevice::waitShutDown() {
    sendMessage({ CAPTUREDEVIDE_COMMAND::SHUTDOWN, nullptr });

    std::unique_lock<std::mutex> guard(stateMutex_);
    while (!isShutDown()) {
        stateCond_.wait(guard);
    }
}

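// messageHandler runs on the detached device thread created in the constructor. It
// initializes COM and Media Foundation, names the thread, then services INITIALIZE /
// START / STOP / UPDATE_STATE commands until SHUTDOWN arrives, after which it releases
// all Media Foundation objects and signals the SHUTDOWN state for waitShutDown().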
void WindowsCaptureDevice::messageHandler() {
    CoInitializeEx(NULL, COINIT_MULTITHREADED);
    MFStartup(MF_VERSION);
    CAPTUREDEVIDE_MESSAGE message;

    if (type == CAPTUREDEVIDE_TYPE::VIDEO) {
        SetCurrentThreadName("Camera");
    } else if (type == CAPTUREDEVIDE_TYPE::Audio) {
        SetCurrentThreadName("Microphone");
    }

    while ((message = getMessage()).command != CAPTUREDEVIDE_COMMAND::SHUTDOWN) {
        switch (message.command) {
        case CAPTUREDEVIDE_COMMAND::INITIALIZE:
            init();
            break;
        case CAPTUREDEVIDE_COMMAND::START:
            start(message.opacity);
            break;
        case CAPTUREDEVIDE_COMMAND::STOP:
            stop();
            break;
        case CAPTUREDEVIDE_COMMAND::UPDATE_STATE:
            updateState((*(CAPTUREDEVIDE_STATE *)message.opacity));
            break;
        case CAPTUREDEVIDE_COMMAND::SHUTDOWN:
            break;
        }
    }

    if (state != CAPTUREDEVIDE_STATE::STOPPED)
        stop();

    std::lock_guard<std::mutex> lock(sdMutex);
    m_pSource = nullptr;
    m_pReader = nullptr;
    m_pCallback = nullptr;
    unRegisterCMPTMFApis();

    std::unique_lock<std::mutex> lock2(paramMutex);
    for (DWORD i = 0; i < param.count; i++) {
        SafeRelease(&param.ppDevices[i]);
    }
    CoTaskMemFree(param.ppDevices); // Null pointer is okay.
    lock2.unlock();

    MFShutdown();
    CoUninitialize();

    updateState(CAPTUREDEVIDE_STATE::SHUTDOWN);
}

HRESULT WindowsCaptureDevice::enumDevices() {
    HRESULT hr = S_OK;
    ComPtr<IMFAttributes> pAttributes;

    hr = MFCreateAttributes(&pAttributes, 1);
    if (SUCCEEDED(hr)) {
        switch (type) {
        case CAPTUREDEVIDE_TYPE::VIDEO:
            hr = pAttributes->SetGUID(
                MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
                MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
            );

            break;
        case CAPTUREDEVIDE_TYPE::Audio:
            hr = pAttributes->SetGUID(
                MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
                MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID
            );

            break;
        default:
            setError(CAPTUREDEVIDE_ERROR_UNKNOWN_TYPE, "Unknown device type");
            return E_FAIL;
        }
    }
    if (SUCCEEDED(hr)) {
        hr = EnumDeviceSources(pAttributes.Get(), &param.ppDevices, &param.count);
    }

    return hr;
}

bool WindowsCaptureDevice::needResample() {
    return deviceParam.sampleRate != targetMediaParam.sampleRate ||
        deviceParam.channels != targetMediaParam.channels ||
        deviceParam.audioFormat != targetMediaParam.audioFormat ||
        deviceParam.bitsPerSample != targetMediaParam.bitsPerSample;
}

//-----------------------------------------------------------------------------
// GetDefaultStride
//
// Gets the default stride for a video frame, assuming no extra padding bytes.
//
//-----------------------------------------------------------------------------

HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
{
    LONG lStride = 0;

    // Try to get the default stride from the media type.
    HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride);
    if (FAILED(hr))
    {
        // Attribute not set. Try to calculate the default stride.
        GUID subtype = GUID_NULL;

        UINT32 width = 0;
        UINT32 height = 0;

        // Get the subtype and the image size.
        hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (SUCCEEDED(hr))
        {
            hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
        }
        if (SUCCEEDED(hr))
        {
            hr = GetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride);
        }

        // Set the attribute for later reference.
        if (SUCCEEDED(hr))
        {
            (void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride));
        }
    }

    if (SUCCEEDED(hr))
    {
        *plStride = lStride;
    }
    return hr;
}