-
-
Save zhiqiang-li/16d1a6a1b00e8fb39847c8ca323b5604 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <mferror.h>   // MF_E_NO_MORE_TYPES
#include <shlwapi.h>
#include "CameraController.h"
#include "VideoBufferLock.h"
#include "../Model/CameraSourceModel.h"
#include "../Helper/Timer.h"
BeginShootStudioNameSpace | |
// Releases a COM interface pointer and nulls it out; no-op when already null.
template <class T> void SafeRelease(T **ppT) {
    if (*ppT != nullptr) {
        (*ppT)->Release();
        *ppT = nullptr;
    }
}
// Process-wide table of enumerated capture devices, shared by all instances.
CameraDeviceParam CameraController::kDeviceParam = { 0 };
bool CameraController::GetCameraDevices() { | |
HRESULT hr = S_OK; | |
IMFAttributes * attributes = NULL; | |
ReleaseCameraDevices(); | |
// Initialize an attribute store to specify enumeration parameters. | |
hr = MFCreateAttributes(&attributes, 1); | |
if (FAILED(hr)) { | |
SafeRelease(&attributes); | |
return false; | |
} | |
hr = attributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); | |
if (FAILED(hr)) { | |
SafeRelease(&attributes); | |
return false; | |
} | |
// Enumerate devices. | |
hr = MFEnumDeviceSources(attributes, &kDeviceParam.mDevices, &kDeviceParam.mCount); | |
SafeRelease(&attributes); | |
return !FAILED(hr); | |
} | |
void CameraController::ReleaseCameraDevices() { | |
for (DWORD i = 0; i < kDeviceParam.mCount; i++) { | |
SafeRelease(&kDeviceParam.mDevices[i]); | |
} | |
CoTaskMemFree(kDeviceParam.mDevices); | |
kDeviceParam.mCount = 0; | |
kDeviceParam.mDevices = nullptr; | |
kDeviceParam.mSelection = 0; | |
} | |
// Returns the device's human-readable ("friendly") name, or an empty string
// when the device is null or the attribute cannot be read.
QString CameraController::GetCameraName(IMFActivate * camera) {
    if (!camera) {
        return "";
    }
    WCHAR *szFriendlyName = NULL;
    HRESULT hr = camera->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &szFriendlyName, NULL);
    if (FAILED(hr) || szFriendlyName == NULL) {
        // BUGFIX: the original ignored the HRESULT and converted a possibly
        // NULL pointer; fail soft with an empty name instead.
        return "";
    }
    QString name = QString::fromWCharArray(szFriendlyName);
    CoTaskMemFree(szFriendlyName);
    return name;
}
// Constructs an idle controller and enumerates the attached cameras.
// The critical section guards all mutable state against the Media Foundation
// callback thread (OnReadSample).
CameraController::CameraController() :
    mActive(NULL),
    mColorTransform(NULL),
    mOutputMediaType(NULL),
    mSourceReader(NULL),
    mMediaSource(NULL),
    mAttributes(NULL),
    mMediaType(NULL),
    mSymbolicLink(NULL),
    mSymbolicLinkSize(0),
    mRefCount(1),          // COM convention: creator holds one reference
    mWidth(0),
    mHeight(0),
    mDefaultStride(0),
    mCameraImage(NULL),
    mColorConvertFunc(nullptr),  // BUGFIX: was left uninitialized by the original
    mCameraSource(nullptr)
{
    InitializeCriticalSection(&mLock);
    GetCameraDevices();
}
// Tears the controller down. The device is closed FIRST so the source reader
// stops delivering frames before the buffers the callback writes into are
// destroyed (the original freed mCameraImage/mCameraSource before
// closeDevice(), leaving a window for a use-after-free on the callback thread).
CameraController::~CameraController() {
    closeDevice();
    for (size_t i = 0; i < mAllMediaTypes.size(); i++) {
        SafeRelease(&mAllMediaTypes[i]);
    }
    free(mCameraImage);    // allocated in setMediaType()
    delete mCameraSource;  // owned sink for converted frames
    ReleaseCameraDevices();
    DeleteCriticalSection(&mLock);
}
// Activates `active` as the current capture device, creates the async source
// reader, and collects every native media type this controller can convert.
// Re-selecting the already-active device is a successful no-op.
// Returns true on success.
bool CameraController::setDevice(IMFActivate * active) {
    if (mActive == active)
    {
        return true;
    }
    closeDevice();
    EnterCriticalSection(&mLock);
    // NOTE(review): `active` is not AddRef'ed — assumes kDeviceParam keeps the
    // activation alive for the lifetime of this selection; confirm callers.
    mActive = active;
    HRESULT hr = active->ActivateObject(__uuidof(IMFMediaSource), (void**)&mMediaSource);
    if (SUCCEEDED(hr)) {
        hr = active->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &mSymbolicLink, &mSymbolicLinkSize);
    }
    if (SUCCEEDED(hr)) {
        hr = MFCreateAttributes(&mAttributes, 2);
    }
    if (SUCCEEDED(hr)) {
        // Raw formats only: we do our own conversion.
        hr = mAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);
    }
    // Set the callback pointer so frames are delivered through OnReadSample().
    if (SUCCEEDED(hr))
    {
        hr = mAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
    }
    if (SUCCEEDED(hr)) {
        hr = MFCreateSourceReaderFromMediaSource(mMediaSource, mAttributes, &mSourceReader);
    }
    if (SUCCEEDED(hr)) {
        // Enumerate the device's native media types; keep those we support.
        for (DWORD i = 0; ; i++) {
            IMFMediaType * type = NULL;
            hr = mSourceReader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &type);
            if (type) {
                if (IsVideoFormatSupported(type)) {
                    mAllMediaTypes.push_back(type);  // vector takes over the reference
                }
                else {
                    type->Release();
                }
            }
            if (FAILED(hr)) {
                // BUGFIX: running off the end of the type list is the normal
                // way out of this loop. The original left hr at
                // MF_E_NO_MORE_TYPES and therefore returned false for a fully
                // successful enumeration.
                if (hr == MF_E_NO_MORE_TYPES) {
                    hr = S_OK;
                }
                break;
            }
        }
    }
    LeaveCriticalSection(&mLock);
    return SUCCEEDED(hr);
}
void CameraController::closeDevice() { | |
EnterCriticalSection(&mLock); | |
if (mColorTransform) | |
{ | |
mColorTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0); | |
mColorTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0); | |
} | |
SafeRelease(&mColorTransform); | |
SafeRelease(&mOutputMediaType); | |
if (mSourceReader) { | |
SafeRelease(&mSourceReader); | |
} | |
if (mSymbolicLink) { | |
CoTaskMemFree(mSymbolicLink); | |
mSymbolicLink = NULL; | |
mSymbolicLinkSize = 0; | |
} | |
SafeRelease(&mAttributes); | |
LeaveCriticalSection(&mLock); | |
} | |
// Selects `mediaType` as the active capture format: records the frame
// geometry, allocates the RGBA staging buffer, picks the CPU conversion
// routine, and builds a video-processor MFT for the hardware conversion path.
// Returns true when the source reader accepted the media type.
bool CameraController::setMediaType(IMFMediaType * mediaType) {
    EnterCriticalSection(&mLock);
    if (mSourceReader)
    {
        GUID subtype{ 0 };
        UINT32 frameRate = 0;
        UINT32 denominator = 0;
        DWORD32 width, height;
        HRESULT hr = mediaType->GetGUID(MF_MT_SUBTYPE, &subtype);
        hr = MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height);
        hr = MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &frameRate, &denominator);
        mWidth = width;
        mHeight = height;
        hr = GetDefaultStride(mediaType, &mDefaultStride);
        // BUGFIX: free the previous staging buffer; the original leaked it on
        // every format change. (free(NULL) is a no-op.)
        free(mCameraImage);
        mCameraImage = (BYTE *)malloc(width * height * 4 * sizeof(BYTE));  // 32-bit RGBA
        mColorConvertFunc = GetColorConvertFunc(subtype);
        delete mCameraSource;
        mCameraSource = new CameraSourceModel(width, height, (float)frameRate / (float)denominator);
        // BUGFIX: drop any transform/output type left over from a previous
        // call before creating new ones (they were leaked before).
        SafeRelease(&mColorTransform);
        SafeRelease(&mOutputMediaType);
        IMFActivate** activates = nullptr;
        UINT32 num_activates = 0;
        MFTEnumEx(
            MFT_CATEGORY_VIDEO_PROCESSOR,
            MFT_ENUM_FLAG_ALL,
            nullptr, nullptr, &activates, &num_activates);
        // BUGFIX: the original dereferenced activates[0] unconditionally and
        // released only the first entry of the returned array.
        if (activates != nullptr && num_activates > 0) {
            activates[0]->ActivateObject(__uuidof(IMFTransform), (void**)&mColorTransform);
            for (UINT32 i = 0; i < num_activates; i++) {
                SafeRelease(&activates[i]);
            }
            CoTaskMemFree(activates);
        }
        if (mColorTransform) {
            mColorTransform->SetInputType(0, mediaType, 0);
            // Describe the MFT output: progressive ARGB32 frames with the same
            // size and frame rate as the input.
            MFCreateMediaType(&mOutputMediaType);
            mOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
            mOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
            mOutputMediaType->SetUINT32(MF_MT_DEFAULT_STRIDE, width * 4);
            MFSetAttributeRatio((IMFAttributes*)mOutputMediaType, MF_MT_FRAME_RATE, frameRate, denominator);
            MFSetAttributeSize((IMFAttributes*)mOutputMediaType, MF_MT_FRAME_SIZE, width, height);
            mOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
            mOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
            MFSetAttributeRatio((IMFAttributes*)mOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
            mColorTransform->SetOutputType(0, mOutputMediaType, 0);
            // BUGFIX: BEGIN_STREAMING is an MFT *message*; the original passed
            // it to ProcessEvent(), whose parameters are a stream id and an
            // IMFMediaEvent* — use ProcessMessage() instead.
            mColorTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
        }
        hr = mSourceReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, mediaType);
        LeaveCriticalSection(&mLock);
        return SUCCEEDED(hr);
    }
    LeaveCriticalSection(&mLock);
    return false;
}
bool CameraController::start() { | |
EnterCriticalSection(&mLock); | |
if (mSourceReader) { | |
HRESULT hr = mSourceReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL); | |
LeaveCriticalSection(&mLock); | |
return SUCCEEDED(hr); | |
} | |
LeaveCriticalSection(&mLock); | |
return false; | |
} | |
// Returns the sink that receives converted RGBA frames. Owned by this
// controller; null until setMediaType() has been called.
CameraSourceModel* CameraController::getSourceModel() {
    return mCameraSource;
}
// Returns the supported native media types of the current device, filled in
// by setDevice(). The vector (and the IMFMediaType references in it) remain
// owned by this controller.
const std::vector<IMFMediaType * > * CameraController::allMediaTypes() {
    return &mAllMediaTypes;
}
// Prepares an MFT_OUTPUT_DATA_BUFFER for IMFTransform::ProcessOutput.
// When the transform does not provide its own output samples, a new IMFSample
// wrapping lMediaBuffer is created and placed in aRefOutputBuffer.pSample;
// the caller is responsible for releasing that sample afterwards.
// Returns S_OK on success, or a failure HRESULT.
HRESULT initOutputDataBuffer(IMFTransform* aPtrTransform,
    MFT_OUTPUT_DATA_BUFFER& aRefOutputBuffer,
    IMFMediaBuffer* lMediaBuffer)
{
    HRESULT lresult;
    MFT_OUTPUT_STREAM_INFO loutputStreamInfo;
    DWORD loutputStreamId = 0;
    IMFSample* lOutputSample = nullptr;  // BUGFIX: was uninitialized
    do {
        if (aPtrTransform == nullptr) {
            lresult = E_POINTER;
            break;
        }
        ZeroMemory(&loutputStreamInfo, sizeof(loutputStreamInfo));
        ZeroMemory(&aRefOutputBuffer, sizeof(aRefOutputBuffer));
        lresult = aPtrTransform->GetOutputStreamInfo(loutputStreamId, &loutputStreamInfo);
        // BUGFIX: test with FAILED() — the original `if (lresult)` also treated
        // non-zero success codes (e.g. S_FALSE) as errors.
        if (FAILED(lresult)) {
            break;
        }
        if ((loutputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == 0 &&
            (loutputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES) == 0) {
            // The transform expects the caller to supply the output sample.
            lresult = MFCreateSample(&lOutputSample);
            if (FAILED(lresult)) {
                break;
            }
            lresult = lOutputSample->AddBuffer(lMediaBuffer);
            if (FAILED(lresult)) {
                SafeRelease(&lOutputSample);  // BUGFIX: don't leak the sample
                break;
            }
            aRefOutputBuffer.pSample = lOutputSample;
        } else {
            lresult = S_OK;
        }
        aRefOutputBuffer.dwStreamID = loutputStreamId;
    } while (false);
    return lresult;
}
// IMFSourceReaderCallback: invoked on a Media Foundation worker thread for
// every frame delivered by ReadSample(). Converts the sample to RGBA — via
// the video-processor MFT when it reports output ready, otherwise via the
// CPU routine chosen in setMediaType() — pushes the pixels into
// mCameraSource, then requests the next frame so capture keeps running.
// NOTE(review): mColorTransform, mColorConvertFunc and mCameraSource are used
// without null checks, so setMediaType() must have succeeded before start()
// — confirm callers enforce this ordering.
HRESULT CameraController::OnReadSample(HRESULT hrStatus, DWORD streamIndex, DWORD streamFlags, LONGLONG timestamp, IMFSample *pSample) {
    HRESULT hr = S_OK;
    EnterCriticalSection(&mLock);
    if (FAILED(hrStatus)) {
        hr = hrStatus;
    }
    if (SUCCEEDED(hr)) {
        if (pSample) {
            Timer timer;
            // Feed the raw frame to the video-processor MFT.
            mColorTransform->ProcessInput(0, pSample, 0);
            DWORD status = 0;
            mColorTransform->GetOutputStatus(&status);
            if (status & MFT_OUTPUT_STATUS_SAMPLE_READY)
            {
                // MFT path: drain the converted frame into a 2D media buffer.
                IMFMediaBuffer* mMeidaBuffer;
                MFT_OUTPUT_DATA_BUFFER mArgb32 = { 0 };
                // 21 == D3DFMT_A8R8G8B8 — presumably chosen to match the
                // MFVideoFormat_ARGB32 output type; TODO confirm.
                MFCreate2DMediaBuffer(mWidth, mHeight, 21, false, &mMeidaBuffer);
                initOutputDataBuffer(mColorTransform, mArgb32, mMeidaBuffer);
                hr = mColorTransform->ProcessOutput(0, 1, &mArgb32, &status);
                if (mArgb32.pEvents) {
                    // ProcessOutput may return an event collection; release it.
                    mArgb32.pEvents->Release();
                    mArgb32.pEvents = nullptr;
                }
                unsigned char* framedata = nullptr;
                DWORD currentLength;
                mMeidaBuffer->Lock(&framedata, NULL, &currentLength);
                mCameraSource->copyBuffer(framedata);
                qDebug() << "IMFTransform time:" << timer.elapsed() << "data:" << framedata[0] << "," << framedata[1] << "," << framedata[2] << "," << framedata[3];
                mMeidaBuffer->Unlock();
                SafeRelease(&mMeidaBuffer);
                if (mArgb32.pSample) {
                    // Detach our buffer before releasing the wrapper sample.
                    mArgb32.pSample->RemoveAllBuffers();
                }
                SafeRelease(&mArgb32.pSample);
                if (!SUCCEEDED(hr))
                {
                    qDebug() << "IMFTransform fail:" << hr;
                }
            }
            else {
                // CPU path: lock the raw buffer and convert with the routine
                // selected for the current subtype.
                IMFMediaBuffer *pBuffer = NULL;
                hr = pSample->GetBufferByIndex(0, &pBuffer);
                // NOTE(review): presumably VideoBufferLock unlocks in its
                // destructor (scope-based) — confirm against its definition.
                VideoBufferLock buffer(pBuffer);
                BYTE *pbScanline0 = NULL;
                LONG lStride = 0;
                hr = buffer.LockBuffer(mDefaultStride, mHeight, &pbScanline0, &lStride);
                mColorConvertFunc(mCameraImage, mWidth * 4 * sizeof(BYTE), pbScanline0, lStride, mWidth, mHeight);
                qDebug() << "convert time:" << timer.elapsed();
                timer.reset();
                mCameraSource->copyBuffer(mCameraImage);
                qDebug() << "copy time:" << timer.elapsed();
                SafeRelease(&pBuffer);
            }
        }
    }
    // Request the next frame.
    hr = mSourceReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
    if (FAILED(hr))
    {
        //NotifyError(hr);
    }
    LeaveCriticalSection(&mLock);
    return hr;
}
// IUnknown: atomically increments the COM reference count.
ULONG CameraController::AddRef()
{
    return InterlockedIncrement(&mRefCount);
}
// IUnknown: atomically decrements the COM reference count and deletes the
// object when it reaches zero (which is why the destructor is private).
ULONG CameraController::Release()
{
    ULONG uCount = InterlockedDecrement(&mRefCount);
    if (uCount == 0) {
        delete this;
    }
    return uCount;
}
// IUnknown: exposes IMFSourceReaderCallback (and, via QISearch, IUnknown).
HRESULT CameraController::QueryInterface(REFIID riid, void** ppv)
{
    static const QITAB qit[] =
    {
        QITABENT(CameraController, IMFSourceReaderCallback),
        { 0 },  // table terminator
    };
    return QISearch(this, qit, riid, ppv);
}
#ifndef IF_EQUAL_RETURN | |
#define IF_EQUAL_RETURN(param, val) if(val == param) return QString::fromWCharArray(L#val) | |
#endif | |
// Maps a Media Foundation attribute/format GUID to its identifier name for
// display and logging. Returns a null QString for unrecognized GUIDs.
// BUGFIX: the original checked MF_MT_MAJOR_TYPE twice (duplicated line) and
// returned NULL instead of an explicit null QString.
QString CameraController::GetVideoFormatName(const GUID& guid) {
    IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE);
    IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE);
    IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT);
    IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES);
    IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED);
    IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE);
    IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET);
    IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX);
    IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE);
    IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE);
    IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO);
    IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS);
    IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS);
    IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT);
    IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING);
    IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE);
    IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION);
    IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES);
    IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES);
    IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX);
    IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING);
    IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE);
    IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE);
    IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE);
    IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE);
    IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED);
    IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE);
    IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE);
    IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING);
    IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE);
    IF_EQUAL_RETURN(guid, MF_MT_PALETTE);
    IF_EQUAL_RETURN(guid, MF_MT_USER_DATA);
    IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER);
    IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0);
    IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0);
    IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1);
    IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1);
    IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK);
    IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK);
#if (WINVER >= _WIN32_WINNT_WIN7)
    IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE);
    IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION);
    IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER);
    IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT);
    IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION);
    IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY);
    IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC);
    IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG);
    IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN);
    IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX);
#endif
    // Media types
    IF_EQUAL_RETURN(guid, MFMediaType_Default);
    IF_EQUAL_RETURN(guid, MFMediaType_Audio);
    IF_EQUAL_RETURN(guid, MFMediaType_Video);
    IF_EQUAL_RETURN(guid, MFMediaType_Protected);
    IF_EQUAL_RETURN(guid, MFMediaType_SAMI);
    IF_EQUAL_RETURN(guid, MFMediaType_Script);
    IF_EQUAL_RETURN(guid, MFMediaType_Image);
    IF_EQUAL_RETURN(guid, MFMediaType_HTML);
    IF_EQUAL_RETURN(guid, MFMediaType_Binary);
    IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer);
    IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8 );
    IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8 );
    IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8 );
    IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5 );
    IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5 );
    IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); //     FCC('AI44') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); //     FCC('AYUV') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); //     FCC('YUY2') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); //     FCC('UYVY') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); //     FCC('NV11') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); //     FCC('NV12') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); //     FCC('YV12') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); //     FCC('IYUV') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); //     FCC('Y210') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); //     FCC('Y216') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); //     FCC('Y410') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); //     FCC('Y416') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_P210); //     FCC('P210') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_P216); //     FCC('P216') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_P010); //     FCC('P010') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_P016); //     FCC('P016') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_v210); //     FCC('v210') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_v410); //     FCC('v410') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); //     FCC('MP43') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); //     FCC('MP4S') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); //     FCC('M4S2') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); //     FCC('MP4V') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); //     FCC('WMV1') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); //     FCC('WMV2') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); //     FCC('WMV3') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); //     FCC('WVC1') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); //     FCC('MSS1') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); //     FCC('MSS2') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); //     FCC('MPG1') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); //     FCC('dvsl') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); //     FCC('dvsd') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); //     FCC('dv25') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); //     FCC('dv50') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); //     FCC('dvh1') );
#if (WINVER >= _WIN32_WINNT_WIN7)
    IF_EQUAL_RETURN(guid, MFVideoFormat_I420); //     FCC('I420') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_H264); //     FCC('H264') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVHD); //     FCC('dvhd') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); //     FCC('dvh1') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_DVC); //      FCC('dvc ') );
    IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG); //     FCC('MJPG') );
#endif
    IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); //              WAVE_FORMAT_PCM );
    IF_EQUAL_RETURN(guid, MFAudioFormat_Float); //            WAVE_FORMAT_IEEE_FLOAT );
    IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); //              WAVE_FORMAT_DTS );
    IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); //  WAVE_FORMAT_DOLBY_AC3_SPDIF );
    IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); //              WAVE_FORMAT_DRM );
    IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); //        WAVE_FORMAT_WMAUDIO2 );
    IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); //        WAVE_FORMAT_WMAUDIO3 );
    IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS );
    IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); //         WAVE_FORMAT_WMASPDIF );
    IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); //             WAVE_FORMAT_WMAVOICE9 );
    IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); //              WAVE_FORMAT_MPEGLAYER3 );
    IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); //             WAVE_FORMAT_MPEG );
#if (WINVER >= _WIN32_WINNT_WIN7)
    IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); //              WAVE_FORMAT_MPEG_HEAAC );
    IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); //             WAVE_FORMAT_MPEG_ADTS_AAC );
#endif
    return QString();  // unknown GUID
}
//------------------------------------------------------------------- | |
// | |
// Conversion functions | |
// | |
//------------------------------------------------------------------- | |
// Saturates an intermediate color value into the displayable byte range [0, 255].
__forceinline BYTE Clip(int clr)
{
    if (clr < 0)   return 0;
    if (clr > 255) return 255;
    return (BYTE)clr;
}
// Converts one Y'CbCr pixel to an opaque RGBA pixel using the fixed-point
// (x256) integer approximation of the BT.601 studio-swing formula — the same
// 298/409/208/100/516 coefficients as the classic MSDN capture samples.
__forceinline RGBQUAD ConvertYCrCbToRGBA(
    int y,
    int cr,
    int cb
)
{
    RGBQUAD rgbq;
    int c = y - 16;    // luma, nominal range [16, 235]
    int d = cb - 128;  // blue-difference chroma, centered on 128
    int e = cr - 128;  // red-difference chroma, centered on 128
    rgbq.rgbRed = Clip((298 * c + 409 * e + 128) >> 8);
    rgbq.rgbGreen = Clip((298 * c - 100 * d - 208 * e + 128) >> 8);
    rgbq.rgbBlue = Clip((298 * c + 516 * d + 128) >> 8);
    rgbq.rgbReserved = 0xff;  // force fully opaque alpha
    return rgbq;
}
//------------------------------------------------------------------- | |
// TransformImage_RGB24 | |
// | |
// RGB-24 to RGB-32 | |
//------------------------------------------------------------------- | |
void TransformImage_RGB24(
    BYTE* pDest,
    LONG lDestStride,
    const BYTE* pSrc,
    LONG lSrcStride,
    DWORD dwWidthInPixels,
    DWORD dwHeightInPixels
)
{
    // Expand each 3-byte source pixel to a 4-byte destination pixel with an
    // opaque alpha channel, honoring the independent row strides.
    for (DWORD row = 0; row < dwHeightInPixels; ++row)
    {
        const RGBTRIPLE* srcPel = (const RGBTRIPLE*)pSrc;
        RGBQUAD* dstPel = (RGBQUAD*)pDest;
        for (DWORD col = 0; col < dwWidthInPixels; ++col)
        {
            RGBQUAD out;
            out.rgbRed = srcPel[col].rgbtRed;
            out.rgbGreen = srcPel[col].rgbtGreen;
            out.rgbBlue = srcPel[col].rgbtBlue;
            out.rgbReserved = 0xff;  // opaque alpha
            dstPel[col] = out;
        }
        pSrc += lSrcStride;
        pDest += lDestStride;
    }
}
//------------------------------------------------------------------- | |
// TransformImage_RGB32 | |
// | |
// RGB-32 to RGB-32 | |
// | |
// Note: This function is needed to copy the image from system | |
// memory to the Direct3D surface. | |
//------------------------------------------------------------------- | |
// Straight row-by-row copy: the source is already 32 bits per pixel, so each
// row contributes dwWidthInPixels * 4 bytes; strides may differ between
// source and destination.
void TransformImage_RGB32(BYTE* pDest, LONG lDestStride, const BYTE* pSrc, LONG lSrcStride, DWORD dwWidthInPixels, DWORD dwHeightInPixels)
{
    MFCopyImage(pDest, lDestStride, pSrc, lSrcStride, dwWidthInPixels * 4, dwHeightInPixels);
}
//------------------------------------------------------------------- | |
// TransformImage_YUY2 | |
// | |
// YUY2 to RGBA-32 | |
//------------------------------------------------------------------- | |
// Converts a YUY2 (4:2:2 packed) frame to RGBA-32. Two pixels share one
// chroma pair per iteration; assumes an even dwWidthInPixels — TODO confirm
// callers guarantee this.
void TransformImage_YUY2(BYTE* pDest, LONG lDestStride, const BYTE* pSrc, LONG lSrcStride, DWORD dwWidthInPixels, DWORD dwHeightInPixels)
{
    for (DWORD y = 0; y < dwHeightInPixels; y++)
    {
        RGBQUAD *pDestPel = (RGBQUAD*)pDest;
        WORD *pSrcPel = (WORD*)pSrc;
        for (DWORD x = 0; x < dwWidthInPixels; x += 2)
        {
            // YUY2 byte order is Y0 U0 Y1 V0; reading little-endian WORDs,
            // LOBYTE is the per-pixel luma and HIBYTE the shared chroma.
            // (The original comment claimed "U0 Y0 V0 Y1", which contradicts
            // what these reads actually pick up.)
            int y0 = (int)LOBYTE(pSrcPel[x]);
            int u0 = (int)HIBYTE(pSrcPel[x]);
            int y1 = (int)LOBYTE(pSrcPel[x + 1]);
            int v0 = (int)HIBYTE(pSrcPel[x + 1]);
            pDestPel[x] = ConvertYCrCbToRGBA(y0, v0, u0);
            pDestPel[x + 1] = ConvertYCrCbToRGBA(y1, v0, u0);
        }
        pSrc += lSrcStride;
        pDest += lDestStride;
    }
}
//------------------------------------------------------------------- | |
// TransformImage_NV12 | |
// | |
// NV12 to RGBA-32 | |
//------------------------------------------------------------------- | |
// Converts an NV12 (4:2:0, Y plane followed by interleaved Cb/Cr plane) frame
// to RGBA-32. Processes a 2x2 pixel quad per inner iteration, so it assumes
// even dwWidthInPixels/dwHeightInPixels — TODO confirm callers guarantee
// this. Also assumes the chroma plane uses the same stride as the luma plane,
// which is the NV12 layout.
void TransformImage_NV12(BYTE* pDst, LONG dstStride, const BYTE* pSrc, LONG srcStride, DWORD dwWidthInPixels, DWORD dwHeightInPixels)
{
    const BYTE* lpBitsY = pSrc;                                   // luma plane
    const BYTE* lpBitsCb = lpBitsY + (dwHeightInPixels * srcStride);; // chroma plane starts after Y (Cb first)
    const BYTE* lpBitsCr = lpBitsCb + 1;                          // Cr interleaved one byte after Cb
    for (UINT y = 0; y < dwHeightInPixels; y += 2)
    {
        // Two luma rows share one chroma row.
        const BYTE* lpLineY1 = lpBitsY;
        const BYTE* lpLineY2 = lpBitsY + srcStride;
        const BYTE* lpLineCr = lpBitsCr;
        const BYTE* lpLineCb = lpBitsCb;
        LPBYTE lpDibLine1 = pDst;
        LPBYTE lpDibLine2 = pDst + dstStride;
        for (UINT x = 0; x < dwWidthInPixels; x += 2)
        {
            // Four luma samples of the 2x2 quad plus its shared chroma pair.
            int y0 = (int)lpLineY1[0];
            int y1 = (int)lpLineY1[1];
            int y2 = (int)lpLineY2[0];
            int y3 = (int)lpLineY2[1];
            int cb = (int)lpLineCb[0];
            int cr = (int)lpLineCr[0];
            // Write the quad as byte-ordered R,G,B,A — note this is the field
            // order of RGBQUAD members, not a typed RGBQUAD store.
            RGBQUAD r = ConvertYCrCbToRGBA(y0, cr, cb);
            lpDibLine1[0] = r.rgbRed;
            lpDibLine1[1] = r.rgbGreen;
            lpDibLine1[2] = r.rgbBlue;
            lpDibLine1[3] = 0xff; // Alpha
            r = ConvertYCrCbToRGBA(y1, cr, cb);
            lpDibLine1[4] = r.rgbRed;
            lpDibLine1[5] = r.rgbGreen;
            lpDibLine1[6] = r.rgbBlue;
            lpDibLine1[7] = 0xff; // Alpha
            r = ConvertYCrCbToRGBA(y2, cr, cb);
            lpDibLine2[0] = r.rgbRed;
            lpDibLine2[1] = r.rgbGreen;
            lpDibLine2[2] = r.rgbBlue;
            lpDibLine2[3] = 0xff; // Alpha
            r = ConvertYCrCbToRGBA(y3, cr, cb);
            lpDibLine2[4] = r.rgbRed;
            lpDibLine2[5] = r.rgbGreen;
            lpDibLine2[6] = r.rgbBlue;
            lpDibLine2[7] = 0xff; // Alpha
            lpLineY1 += 2;
            lpLineY2 += 2;
            lpLineCr += 2;  // advance past the interleaved Cb/Cr pair
            lpLineCb += 2;
            lpDibLine1 += 8;  // two 4-byte output pixels per row
            lpDibLine2 += 8;
        }
        pDst += (2 * dstStride);
        lpBitsY += (2 * srcStride);
        lpBitsCr += srcStride;  // one chroma row per two luma rows
        lpBitsCb += srcStride;
    }
}
// Maps each supported MF video subtype to its CPU conversion routine.
// Consulted by IsColorFormatSupported() and GetColorConvertFunc().
ColorConversionInfo CameraController::kColorConversionInfos[] = {
    { MFVideoFormat_RGB32, TransformImage_RGB32 },
    { MFVideoFormat_RGB24, TransformImage_RGB24 },
    { MFVideoFormat_YUY2, TransformImage_YUY2 },
    { MFVideoFormat_NV12, TransformImage_NV12 }
};
// Number of entries in the table above.
DWORD CameraController::kColorConversionInfoCount = ARRAYSIZE(CameraController::kColorConversionInfos);
bool CameraController::IsColorFormatSupported(const GUID& subtype) { | |
for (DWORD i = 0; i < CameraController::kColorConversionInfoCount; i++) { | |
if (subtype == CameraController::kColorConversionInfos[i].subtype) { | |
return true; | |
} | |
} | |
return false; | |
} | |
// Looks up the CPU conversion routine for `subtype` in the static table;
// returns NULL when the subtype is not supported.
ColorConvertionFunc CameraController::GetColorConvertFunc(const GUID& subtype) {
    const ColorConversionInfo* entry = kColorConversionInfos;
    const ColorConversionInfo* end = entry + kColorConversionInfoCount;
    for (; entry != end; ++entry) {
        if (entry->subtype == subtype) {
            return entry->xform;
        }
    }
    return NULL;
}
// True when (width, height) is one of the whitelisted capture resolutions
// (720p/1080p in both landscape and portrait orientation).
bool CameraController::IsVideoSizeSupported(int width, int height) {
    static const int kSupportedSize[][2] = { {1280, 720}, {720, 1280}, {1920, 1080}, {1080, 1920} };
    // BUGFIX-ish: iterate over the array itself instead of a hard-coded `4`
    // that could silently drift when entries are added or removed.
    for (const auto& size : kSupportedSize) {
        if (width == size[0] && height == size[1]) {
            return true;
        }
    }
    return false;
}
// True when `fps` is exactly one of the whitelisted frame rates.
// NOTE(review): exact float equality means fractional NTSC rates such as
// 30000/1001 (~29.97) are rejected — confirm that is intended.
bool CameraController::IsFrameRateSupported(float fps) {
    static const float kSupportedFps[] = { 24.f, 25.f, 30.f };
    // BUGFIX-ish: bound derived from the array instead of a hard-coded `3`.
    for (float supported : kSupportedFps) {
        if (fps == supported) {
            return true;
        }
    }
    return false;
}
// A native media type is usable when its pixel format has a registered
// converter, its frame size is whitelisted, and its frame rate is whitelisted.
bool CameraController::IsVideoFormatSupported(IMFMediaType* mediaType) {
    GUID subtype{ 0 };
    UINT32 frameRate = 0;
    UINT32 denominator = 0;
    DWORD32 width, height;
    HRESULT hr = mediaType->GetGUID(MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr) || !IsColorFormatSupported(subtype)) {
        return false;
    }
    hr = MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height);
    if (FAILED(hr) || !IsVideoSizeSupported(width, height)) {
        return false;
    }
    hr = MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &frameRate, &denominator);
    // BUGFIX: reject a zero denominator before dividing by it.
    if (FAILED(hr) || denominator == 0 || !IsFrameRateSupported((float)frameRate / (float)denominator)) {
        return false;
    }
    return true;
}
// Determines the default row stride (bytes per scanline, possibly negative
// for bottom-up images) of `pType`, writing it to *plStride on success.
// Reads MF_MT_DEFAULT_STRIDE when present; otherwise derives the stride from
// the subtype FOURCC and frame width and caches it back onto the type.
HRESULT CameraController::GetDefaultStride(IMFMediaType *pType, LONG *plStride) {
    LONG lStride = 0;
    // Try to get the default stride from the media type.
    HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride);
    if (FAILED(hr))
    {
        // Attribute not set. Try to calculate the default stride.
        GUID subtype = GUID_NULL;
        UINT32 width = 0;
        UINT32 height = 0;
        // Get the subtype and the image size.
        hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (SUCCEEDED(hr))
        {
            hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
        }
        if (SUCCEEDED(hr))
        {
            // subtype.Data1 is the FOURCC / bitmap compression code.
            hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride);
        }
        // Set the attribute for later reference.
        if (SUCCEEDED(hr))
        {
            (void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride));
        }
    }
    if (SUCCEEDED(hr))
    {
        *plStride = lStride;
    }
    return hr;
}
EndShootStudioNameSpace | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#pragma once | |
#include <windows.h> | |
#include <windowsx.h> | |
#include <mfapi.h> | |
#include <mfidl.h> | |
#include <mfreadwrite.h> | |
#include <vector> | |
#include <QtCore/QtCore> | |
#include "../Sources/StudioConfig.h" | |
BeginShootStudioNameSpace | |
// | |
// ChooseDeviceParam structure | |
// | |
// Holds an array of IMFActivate pointers that represent video | |
// capture devices. | |
// | |
struct CameraDeviceParam
{
    IMFActivate **mDevices; // Array of IMFActivate pointers, allocated by MFEnumDeviceSources (freed with CoTaskMemFree).
    UINT32 mCount;          // Number of elements in the array.
    UINT32 mSelection;      // Selected device, by array index.
};
// Signature shared by all pixel-format conversion routines: converts one
// frame from pSrc (row stride lSrcStride) into 32-bit RGBA at pDest (row
// stride lDestStride). [sic: "Convertion" is the established project spelling]
typedef void(*ColorConvertionFunc)(BYTE* pDest, LONG lDestStride, const BYTE* pSrc, LONG lSrcStride, DWORD width, DWORD height);
// Static table of output formats and conversion functions.
struct ColorConversionInfo
{
    GUID subtype;              // MF video subtype this entry handles
    ColorConvertionFunc xform; // converter routine for that subtype
};
class CameraSourceModel; | |
// Drives a single webcam through the Media Foundation source reader:
// enumerates devices, selects a device and media type, and converts incoming
// frames to RGBA for a CameraSourceModel. Lifetime is COM reference-counted
// (AddRef/Release); the destructor is private on purpose.
class CameraController : public IMFSourceReaderCallback {
private:
    IMFActivate * mActive;            // currently selected device activation (not AddRef'ed here)
    IMFMediaSource * mMediaSource;    // media source activated from mActive
    IMFAttributes * mAttributes;      // source-reader attributes (async callback, no converters)
    IMFMediaType * mMediaType;        // NOTE(review): never assigned in the visible code — confirm it is used
    IMFSourceReader * mSourceReader;  // async reader delivering frames to OnReadSample
    std::vector<IMFMediaType * > mAllMediaTypes; // supported native types of the current device (references owned)
    UINT mWidth;                      // current frame width in pixels
    UINT mHeight;                     // current frame height in pixels
    LONG mDefaultStride;              // source row stride of the current media type
    WCHAR * mSymbolicLink;            // device symbolic link (CoTaskMemAlloc'ed)
    UINT32 mSymbolicLinkSize;         // length of mSymbolicLink
    long mRefCount;                   // COM reference count (Interlocked*)
    CRITICAL_SECTION mLock;           // guards mutable state against the MF callback thread
    BYTE * mCameraImage;              // malloc'ed RGBA staging buffer (width*height*4)
    ColorConvertionFunc mColorConvertFunc; // CPU converter for the current subtype
    IMFTransform* mColorTransform;    // video-processor MFT conversion path
    IMFMediaType* mOutputMediaType;   // ARGB32 output type set on mColorTransform
    CameraSourceModel* mCameraSource; // owned sink for converted frames
    static bool GetCameraDevices();     // (re)enumerates capture devices into kDeviceParam
    static void ReleaseCameraDevices(); // releases the contents of kDeviceParam
    virtual ~CameraController();        // private: destroy via Release()
public:
    CameraController();
    // Selects a capture device and builds the source reader; true on success.
    bool setDevice(IMFActivate * active);
    // Releases the device, reader and conversion objects; safe when closed.
    void closeDevice();
    // Native media types of the current device; owned by this controller.
    const std::vector<IMFMediaType * > * allMediaTypes();
    // Selects the capture format and prepares the conversion pipeline.
    bool setMediaType(IMFMediaType * mediaType);
    // Requests the first frame; subsequent frames are requested per callback.
    bool start();
    CameraSourceModel* getSourceModel();
public:
    // IMFSourceReaderCallback / IUnknown.
    STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample);
    STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) { return S_OK; }
    STDMETHODIMP OnFlush(DWORD) { return S_OK; }
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();
public:
    static ColorConversionInfo kColorConversionInfos[]; // subtype -> converter table
    static DWORD kColorConversionInfoCount;             // entry count of the table
    static CameraDeviceParam kDeviceParam;              // process-wide enumerated device list
    static QString GetCameraName(IMFActivate * camera);
    static QString GetVideoFormatName(const GUID& guid);
    static bool IsColorFormatSupported(const GUID& subtype);
    static ColorConvertionFunc GetColorConvertFunc(const GUID& subtype);
    static bool IsVideoSizeSupported(int width, int height);
    static bool IsFrameRateSupported(float fps);
    static bool IsVideoFormatSupported(IMFMediaType* mediaType);
    static HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride);
};
EndShootStudioNameSpace | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment