ADTF_DEVICE_TOOLBOX  3.12.0 (ADTF 3.18.3)
Source Code for DirectShow Video Capture Device
Location
./src/examples/src/directshow_capture_device/sdk/strmbase
This example demonstrates:
  • How to use DirectShow to access the images sent by a camera
  • How to set the resolution in the image stream type
  • How to implement a streaming source
  • How to implement a QML editor to change properties

To build this streaming source, you need the Windows 10 SDK and the strmbase library provided in the location listed above.

Note
For information on DirectShow application programming, see the following link: http://msdn.microsoft.com/en-us/library/windows/desktop/dd390352(v=vs.85).aspx
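The cDSInput helper used in this example (included via DSVideo.h) wraps the DirectShow graph and device handling. For orientation only, the following sketch shows how video capture devices can be enumerated with the plain DirectShow COM API. It is not part of the example sources, the function name is purely illustrative, and it assumes COM has already been initialized on the calling thread.
#include <dshow.h>
#include <cstdio>
// link against strmiids.lib, ole32.lib and oleaut32.lib

// Illustrative only: print the friendly name of every DirectShow video capture device.
void EnumerateVideoCaptureDevices()
{
    ICreateDevEnum* pDevEnum = nullptr;
    if (FAILED(CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum))))
    {
        return;
    }
    IEnumMoniker* pEnum = nullptr;
    // CreateClassEnumerator returns S_FALSE if no device is installed, hence the comparison with S_OK.
    if (pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0) == S_OK)
    {
        IMoniker* pMoniker = nullptr;
        while (pEnum->Next(1, &pMoniker, nullptr) == S_OK)
        {
            IPropertyBag* pPropBag = nullptr;
            if (SUCCEEDED(pMoniker->BindToStorage(nullptr, nullptr, IID_PPV_ARGS(&pPropBag))))
            {
                VARIANT var;
                VariantInit(&var);
                if (SUCCEEDED(pPropBag->Read(L"FriendlyName", &var, nullptr)))
                {
                    // var.bstrVal holds the human-readable device name
                    wprintf(L"%s\n", var.bstrVal);
                    VariantClear(&var);
                }
                pPropBag->Release();
            }
            pMoniker->Release();
        }
        pEnum->Release();
    }
    pDevEnum->Release();
}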
Header for the main logic
#pragma once
#include <adtf_systemsdk.h>
#include <DSVideo.h>
#include <mutex>
#include <condition_variable>
class cDirectShowCaptureDevice: adtf::system::cKernelThreadLoop, public IDSStreamEventSink
{
public:
cDirectShowCaptureDevice() = default;
~cDirectShowCaptureDevice() override;
adtf::streaming::ISampleWriter* m_pWriter = nullptr;
adtf::ucom::object_ptr<adtf::streaming::IStreamType> m_pStreamType;
tResult DeviceOpen(const tChar* strReceiverName,
const tChar* strDevice,
tUInt32 nFrameRate,
const tInternalFormat& sFormat);
tResult DeviceClose();
tResult DeviceStart();
tResult DeviceStop();
// implements IDSStreamEventSink
void OnNewFrame(tTimeStamp nTime,
tUInt8* pData,
tInt nDataSize,
tInternalFormat* pBitmapFormat,
tColor* pPalette,
tBool bVFlip) override;
void OnStreamEvent(tInt lEventCode,
tInt lParam1,
tInt lParam2) override;
private:
tResult LoopFunc();
cDSInput m_oDSInput;
tInternalFormat m_sCurrentFormat;
adtf::ucom::object_ptr<adtf::services::IReferenceClock> m_pClock;
adtf::ucom::object_ptr<adtf::streaming::ISample> m_pCurrentSample;
tBool m_bTypeChanged = tFalse;
tTimeStamp m_tmFrameTime = {};
adtf::util::cString m_strReceiverName;
std::mutex m_oSync;
std::condition_variable m_oReceiveEvent;
};
Implementation for the main logic
#include "DirectShowCapture.h"
#include <easy/profiler.h>
#include <Windows.h>
#include <chrono>
#include <cstring>
#ifdef WIN32
#undef GetObject
#endif
using namespace adtf::streaming;
using namespace adtf::ucom;
using namespace adtf::util;
constexpr tUInt DEFAULT_FRAMERATE = 20;
constexpr tUInt DEFAULT_TIMEOUT = 2000000;
constexpr tUInt DEFAULT_BITSPERPIXEL = 8;
constexpr tTimeStamp MICROSEC_PER_SEC = 1000000;
cDirectShowCaptureDevice::~cDirectShowCaptureDevice()
{
m_oDSInput.Release();
}
tResult cDirectShowCaptureDevice::DeviceOpen(const tChar* strReceiverName,
const tChar* strDeviceName,
tUInt32 nFrameRate,
const tInternalFormat& sFormat)
{
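// Resolve the reference clock, configure the DirectShow input with the requested
// frame rate and image format, open the capture device and publish the format
// actually delivered by the driver as the initial image stream type.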
m_strReceiverName = strReceiverName;
_runtime->GetObject(m_pClock);
cDSInput::tDeviceConfig sDSDeviceConfig;
std::memset(&sDSDeviceConfig, 0, sizeof(sDSDeviceConfig));
sDSDeviceConfig.ui32Mask = cDSInput::DCM_FrameRate;
sDSDeviceConfig.nFrameRate = nFrameRate;
if (nFrameRate != 0)
{
m_tmFrameTime = MICROSEC_PER_SEC / nFrameRate;
}
else
{
m_tmFrameTime = MICROSEC_PER_SEC / DEFAULT_FRAMERATE;
}
m_pStreamType = make_object_ptr<cStreamType>(stream_meta_type_image());
set_stream_type_image_format(*m_pStreamType, sFormat);
sDSDeviceConfig.sBitmapFormat = sFormat;
HRESULT hr = S_OK;
if (!m_oDSInput.Open(strDeviceName, nullptr, this, &sDSDeviceConfig, 0, &hr))
{
if (hr != S_OK)
{
cString strError = cDSInput::GetDSError(hr);
RETURN_ERROR_DESC(ERR_INVALID_ARG, "DirectShow Error: %s", strError.GetPtr());
}
else
{
RETURN_ERROR_DESC(ERR_INVALID_ARG, "Can not open device (error unknown)");
}
}
const tInternalFormat* psCaptureFormat = m_oDSInput.GetBitmapFormat();
set_stream_type_image_format(*m_pStreamType, *psCaptureFormat);
RETURN_NOERROR;
}
tResult cDirectShowCaptureDevice::DeviceStart()
{
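// Start the DirectShow graph and run LoopFunc() on a kernel thread loop named after the receiver.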
m_oDSInput.Play();
RETURN_IF_FAILED(Create(m_strReceiverName + "_t"));
RETURN_IF_FAILED(SetState(tLoopState::Running));
RETURN_NOERROR;
}
tResult cDirectShowCaptureDevice::DeviceStop()
{
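// Stop the DirectShow graph, discard any pending sample and suspend the thread loop.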
m_oDSInput.Stop();
{
std::unique_lock<std::mutex> oLock(m_oSync);
m_pCurrentSample = nullptr;
m_bTypeChanged = tFalse;
SetState(tLoopState::Suspended, tFalse);
m_oReceiveEvent.notify_all();
}
SetState(tLoopState::Suspended, tTrue);
RETURN_NOERROR;
}
tResult cDirectShowCaptureDevice::DeviceClose()
{
SetState(tLoopState::Destroyed, tFalse);
m_oDSInput.Release();
RETURN_NOERROR;
}
tResult cDirectShowCaptureDevice::LoopFunc()
{
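// Wait until OnNewFrame() signals new data or one nominal frame period has elapsed,
// then forward a pending stream type change and the most recent sample to the writer.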
object_ptr<ISample> pSample;
{
std::unique_lock<std::mutex> oLock(m_oSync);
m_oReceiveEvent.wait_for(oLock, std::chrono::microseconds(m_tmFrameTime));
if (m_bTypeChanged)
{
set_stream_type_image_format(*m_pStreamType, m_sCurrentFormat);
object_ptr<const IStreamType> pNewType = m_pStreamType;
m_bTypeChanged = tFalse;
*m_pWriter << pNewType;
m_pWriter->ManualTrigger();
}
pSample = m_pCurrentSample;
}
if (pSample)
{
m_pWriter->Write(pSample);
m_pWriter->ManualTrigger();
}
RETURN_NOERROR;
}
void cDirectShowCaptureDevice::OnNewFrame(tTimeStamp nTime,
tUInt8* pData,
tInt nDataSize,
tInternalFormat* pBitmapFormat,
tColor* pPalette,
tBool bVFlip)
{
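// Callback from the DirectShow input for every delivered frame: the frame is copied
// into an ADTF sample (vertically flipped for bottom-up bitmaps) and handed over to
// LoopFunc() under a short lock.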
EASY_FUNCTION(m_strReceiverName);
tBool bTypeChanged = tFalse;
if (pBitmapFormat != nullptr)
{
if (*pBitmapFormat != m_sCurrentFormat)
{
m_sCurrentFormat = *pBitmapFormat;
bTypeChanged = tTrue;
}
}
object_ptr<ISample> pSample;
if (IS_FAILED(alloc_sample(pSample)))
{
// report the error on the sample stream and skip this frame
*m_pWriter << ERR_MEMORY;
m_pWriter->ManualTrigger();
return;
}
if (IS_FAILED(pSample->Set(m_pClock->GetStreamTime(), pData, nDataSize)))
{
*m_pWriter << ERR_MEMORY;
m_pWriter->ManualTrigger();
return;
}
if (!bVFlip)
{
// top-down orientation
}
else
{
object_ptr_locked<ISampleBuffer> pBuffer;
pSample->WriteLock(pBuffer, nDataSize);
if (pBuffer)
{
tVoid* pCurrentBuffer = pBuffer->GetPtr();
// bottom-up orientation, flip vertically
if (m_sCurrentFormat.m_ui32Height > 0)
{
tSize nBytesPerLine = m_sCurrentFormat.m_szMaxByteSize / m_sCurrentFormat.m_ui32Height;
const tUInt8* pSrcLine = pData;
const tUInt8* pSrcLineEnd = pData + m_sCurrentFormat.m_szMaxByteSize;
tUInt8* pDestLine = static_cast<tUInt8*>(pCurrentBuffer) + ((m_sCurrentFormat.m_ui32Height - 1) * nBytesPerLine);
while (pSrcLine < pSrcLineEnd)
{
cMemoryBlock::MemCopy(pDestLine, pSrcLine, nBytesPerLine);
pSrcLine += nBytesPerLine;
pDestLine -= nBytesPerLine;
}
}
}
}
{
//set the sample and type information only with a short lock
std::unique_lock<std::mutex> oLock(m_oSync);
if (bTypeChanged)
{
m_bTypeChanged = tTrue;
m_pCurrentSample = nullptr;
}
if (pSample)
{
m_pCurrentSample = pSample;
}
m_oReceiveEvent.notify_all();
}
}
void cDirectShowCaptureDevice::OnStreamEvent(tInt lEventCode,
tInt lParam1,
tInt lParam2)
{
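// DirectShow stream events are not evaluated in this example.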
}
Header for the Streaming Source
#pragma once
#include <adtf_filtersdk.h>
#include "DirectShowCapture.h"
#define CID_DEVTB_DIRECTSHOW_CAPTURE_DEVICE "demo_directshow_capture_device.streaming_source.devicetb.cid"
class cDirectShowCaptureSource: public adtf::filter::cSampleStreamingSource
{
public:
ADTF_CLASS_ID_NAME(cDirectShowCaptureSource,
CID_DEVTB_DIRECTSHOW_CAPTURE_DEVICE,
"DirectShow Video Capture Device");
cDirectShowCaptureSource();
~cDirectShowCaptureSource() override;
tResult Init() override;
tResult StartStreaming() override;
tResult StopStreaming() override;
protected:
cDirectShowCaptureDevice m_oCamera;
adtf::base::property_variable<adtf::util::cString> m_strDeviceName = "default";
adtf::base::property_variable<tUInt32> m_nFrameRate = 0;
adtf::base::property_variable<tInt32> m_nHeight = 0;
adtf::base::property_variable<tInt32> m_nWidth = 0;
};
Implementation for the Streaming Source
#include "DirectShowSource.h"
using namespace adtf::util;
using namespace adtf::streaming;
using namespace adtf::ucom;
ADTF_PLUGIN_VERSION("DirectShow Video Capture Device Plugin",
devicetb,
DEVICETB_VERSION_MAJOR,
DEVICETB_VERSION_MINOR,
DEVICETB_VERSION_PATCH,
cDirectShowCaptureSource)
namespace
{
cString GetNamedGraphObjectFullName(const INamedGraphObject& oGraphObject,
const tChar* strSeperator)
{
cString strMyName {};
cString strFullName {};
if (IS_OK(oGraphObject.GetName(adtf::base::adtf_string<cString>(&strMyName))))
{
const IObject* pParent {};
if (IS_OK(oGraphObject.GetParent(pParent)) && pParent)
{
const INamedGraphObject* pParentObject {};
pParentObject = ucom_cast<const INamedGraphObject*>(pParent);
if (pParentObject)
{
strFullName = GetNamedGraphObjectFullName(*pParentObject, strSeperator);
strFullName.Append(strSeperator);
}
}
}
strFullName.Append(strMyName);
return strFullName;
}
} // namespace
cDirectShowCaptureSource::cDirectShowCaptureSource()
{
m_strDeviceName.SetDescription("The name of the device. You can also address the device by ID using the form device:<ID>.");
RegisterPropertyVariable("device", m_strDeviceName);
m_nFrameRate.SetDescription("Specifies the frames per second.");
RegisterPropertyVariable("framerate", m_nFrameRate);
m_nWidth.SetDescription("Sets the width of the capture resolution.");
RegisterPropertyVariable("width", m_nWidth);
m_nHeight.SetDescription("Sets the height of the capture resolution.");
RegisterPropertyVariable("height", m_nHeight);
m_oCamera.m_pWriter = CreateOutputPin("video", stream_meta_type_image());
SetDescription("video", "The captured video sample stream.");
SetEditor("Configure Camera...", "demo_camera_config_editor.qml");
SetDescription("Use this Streaming Source to receive data from a video device compatible with DirectShow API.");
SetHelpLink("$(ADTF_DEVICE_TOOLBOX_DIR)/doc/adtf_device_toolbox_html/page_example_directshow_capture_device.html");
}
cDirectShowCaptureSource::~cDirectShowCaptureSource()
{
m_oCamera.DeviceClose();
}
tResult cDirectShowCaptureSource::Init()
{
RETURN_IF_FAILED(cSampleStreamingSource::Init());
tInternalFormat sFormat {};
sFormat.m_ui32Height = m_nHeight;
sFormat.m_ui32Width = m_nWidth;
cString strDev {m_strDeviceName};
cString strName {GetNamedGraphObjectFullName(*this, ".")};
if (!strName.IsEmpty())
{
RETURN_IF_FAILED(m_oCamera.DeviceOpen(strName, strDev, m_nFrameRate, sFormat));
m_oCamera.m_pWriter->ChangeType(m_oCamera.m_pStreamType);
}
RETURN_NOERROR;
}
tResult cDirectShowCaptureSource::StartStreaming()
{
RETURN_IF_FAILED(cSampleStreamingSource::StartStreaming());
RETURN_IF_FAILED(m_oCamera.DeviceStart());
RETURN_NOERROR;
}
tResult cDirectShowCaptureSource::StopStreaming()
{
m_oCamera.DeviceStop();
return cSampleStreamingSource::StopStreaming();
}
QML Filter Editor
/**
*
* @file
* Copyright &copy; Audi Electronics Venture GmbH. All rights reserved
*
*/
import QtQuick 2.12
import QtQuick.Window 2.2
import QtQuick.Controls 2.12
import QtQuick.Layouts 1.12
import QtMultimedia 5.12
import EditorPlugin 1.0
import Utilities 1.0
EditorPluginBase{
id: root
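// The editor mirrors the streaming source properties (device, framerate, width, height)
// and writes the chosen values back to the target model when Apply is clicked.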
property var selectedDevice: getPropertyValue(targetModel,"device")
property var selectedFramerate: getPropertyValue(targetModel,"framerate")
property var selectedHeight: getPropertyValue(targetModel,"height")
property var selectedWidth: getPropertyValue(targetModel,"width")
ApplicationWindow{
id: window
width: ScaleHelper.getScaledValue(460)
height: ScaleHelper.getScaledValue(320)
visible: true
title: qsTr("Camera Configuration Editor")
Component.onCompleted:
{
Qt.callLater(function()
{
visible = true
window.raise()
window.requestActivate()
})
}
header: ToolBar{
RowLayout{
spacing: ScaleHelper.getScaledValue(20)
anchors.fill: parent
Label{
text: qsTr("Configure Camera:")
font.pixelSize: 20
horizontalAlignment: Qt.AlignHCenter
verticalAlignment: Qt.AlignVCenter
Layout.fillWidth: true
}
}
}
Item{
anchors.fill: parent
GridLayout{
columns: 2
anchors.left: parent.left
anchors.leftMargin: ScaleHelper.getScaledValue(20)
anchors.right:parent.right
anchors.rightMargin: ScaleHelper.getScaledValue(20)
anchors.top:parent.top
anchors.topMargin: ScaleHelper.getScaledValue(20)
anchors.bottom: parent.verticalCenter
anchors.bottomMargin: ScaleHelper.getScaledValue(20)
Label{
wrapMode: Label.Wrap
text: qsTr("Device:")
Layout.alignment: Qt.AlignLeft
}
ComboBox{
id: cameraComboBox
textRole: "name"
model: cameraConfig
Layout.fillWidth: true
Component.onCompleted:{
for(let i = 0; i < cameraConfig.count; i++){
if(cameraConfig.get(i).name === selectedDevice){
camera.deviceId = cameraConfig.get(i).camId
camera.cameraState = Camera.LoadedState
cameraComboBox.displayText = selectedDevice
resolutionComboBox.enable()
return
}
}
cameraComboBox.displayText = qsTr("Select Device...")
}
onActivated:{
const previousSelectedDevice = selectedDevice
if(cameraConfig.get(currentIndex).camId !== 0){
camera.deviceId = cameraConfig.get(currentIndex).camId
selectedDevice = cameraConfig.get(currentIndex).name
camera.cameraState = Camera.LoadedState
resolutionComboBox.enable()
if(selectedDevice !== previousSelectedDevice){
resolutionComboBox.displayText = qsTr("Select Resolution...")
framerateComboBox.displayText = qsTr("Select FrameRate...")
framerateComboBox.enabled = false
}
}
cameraComboBox.displayText = selectedDevice
}
}
Label{
wrapMode: Label.Wrap
horizontalAlignment: Qt.AlignHCenter
text: qsTr("Resolution:")
}
ComboBox{
id: resolutionComboBox
textRole: "text"
model: resolutionConfig
focus: true
Layout.fillWidth: true
Component.onCompleted:{
enabled = false
resolutionComboBox.displayText = qsTr("Select Resolution...")
}
onActivated:{
const resolution = (resolutionConfig.get(currentIndex).text).split("x")
selectedHeight = resolution[1]
selectedWidth = resolution[0]
resolutionComboBox.displayText = currentText
framerateComboBox.enable()
framerateComboBox.displayText = qsTr("Select FrameRate...")
}
function enable(){
resolutionConfig.list_supported_resolutions()
enabled = true
resolutionComboBox.displayText = selectedWidth + "x" + selectedHeight
framerateComboBox.enable()
}
}
Label{
wrapMode: Label.Wrap
horizontalAlignment: Qt.AlignHCenter
text: qsTr("Framerate:")
}
ComboBox{
id: framerateComboBox
textRole: "text"
model: framerateConfig
Layout.fillWidth: true
Component.onCompleted:{
enabled = false
framerateComboBox.displayText = qsTr("Select FrameRate...")
}
onActivated:{
selectedFramerate = framerateConfig.get(currentIndex).text
framerateComboBox.displayText = selectedFramerate
}
onDisplayTextChanged:{
applyButton.checkActivation()
}
function enable(){
framerateConfig.list_supported_framerates()
enabled = true
framerateComboBox.displayText = selectedFramerate
}
}
}
ListModel{
id: cameraConfig
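// All video capture devices known to QtMultimedia, filled when the editor is loaded.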
Component.onCompleted:{
list_available_cameras(cameraConfig)
}
function list_available_cameras(model){
cameraConfig.clear()
for(let i = 0; i < QtMultimedia.availableCameras.length; i++){
model.append(
{
name: QtMultimedia.availableCameras[i].displayName,
camId: QtMultimedia.availableCameras[i].deviceId
}
)
}
}
}
ListModel{
id: resolutionConfig
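// Resolutions supported by the currently selected camera.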
function list_supported_resolutions(){
resolutionConfig.clear()
const resolutions = camera.supportedViewfinderResolutions()
for(let i = 0; i < resolutions.length; i++){
resolutionConfig.append(
{
text: "" + resolutions[i].width + "x" + resolutions[i].height
}
)
}
}
}
ListModel{
id: framerateConfig
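// Frame rates supported for the currently selected resolution.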
function list_supported_framerates(){
framerateConfig.clear()
const frameRateRanges = camera.supportedViewfinderFrameRateRanges(Qt.size(selectedWidth,selectedHeight))
for(let i = 0; i < frameRateRanges.length; i++){
const frameRate = Math.round(frameRateRanges[i].minimumFrameRate)
framerateConfig.append(
{
text: "" + frameRate /*""+frameRateRanges[i].minimumFrameRate+"-"+frameRateRanges[i].maximumFrameRate*/
}
)
}
}
}
}
footer: DialogButtonBox{
Button{
id: applyButton
text: qsTr("Apply")
DialogButtonBox.buttonRole: DialogButtonBox.AcceptRole
Component.onCompleted:{
checkActivation()
}
function checkActivation(){
if(cameraComboBox.displayText !== qsTr("Select Device...") &&
framerateComboBox.displayText !== qsTr("Select FrameRate...") &&
resolutionComboBox.displayText !== qsTr("Select Resolution...")){
enabled = true
}
else{
enabled = false
}
}
}
Button{
text: qsTr("Cancel")
DialogButtonBox.buttonRole: DialogButtonBox.RejectRole
}
onAccepted:{
setProperty(targetModel, "device", selectedDevice)
setProperty(targetModel, "framerate", selectedFramerate)
setProperty(targetModel, "height", selectedHeight)
setProperty(targetModel, "width", selectedWidth)
camera.stop()
window.close()
}
onRejected:{
camera.stop()
window.close()
}
}
}
Camera{
id: camera
Component.onCompleted:{
camera.cameraState = Camera.UnloadedState
}
}
}