ADTF_DEVICE_TOOLBOX  3.12.1 (ADTF 3.18.3)
Demo Video4Linux Capture Device
Location
./src/examples/src/video4linux/
This example shows:
  • How to use Video4Linux to access the images sent from a camera
  • How to implement a Streaming Source
Header for the main logic
#ifndef _V4L_CAPTURE_SOURCE_
#define _V4L_CAPTURE_SOURCE_
// Extends the generic ADTF stream image format with v4l1-specific
// palette/depth information.
struct tInternalFormat : public streaming::tStreamImageFormat
{
// NOTE(review): neither member is referenced anywhere in this file —
// presumably kept for API symmetry with other capture examples; verify
// before removing.
tInt32 m_nPaletteSize;
tInt32 m_nBitsPerPixel;
};
// Worker that captures frames from a Video4Linux (v4l1) device on a kernel
// thread loop and forwards them through an ADTF sample writer.
class cVideo4LinuxCapture : public system::cKernelThreadLoop
{
private:
// The streaming source owns this object and wires m_oWriter/m_pStreamType
// into its output pin (see cVideo4LinuxCaptureSource::Construct).
friend class cVideo4LinuxCaptureSource;
// File descriptor of the opened v4l1 device; -1 while closed.
tInt m_iDev;
// Device state as reported/configured via the v4l1 ioctls.
struct video_capability m_sCapabilities;
struct video_picture m_sPicture;
struct video_window m_sWindow;
// mmap interface bookkeeping (used only when m_bReadMode is tFalse).
struct video_mbuf m_sBuffer;
struct video_mmap m_sMap;
// Base address returned by v4l1_mmap; only valid in mmap mode.
tVoid* m_pBuffer;
// Device node path, e.g. "/dev/video0".
adtf_util::cString m_strDevice;
// NOTE(review): brightness/contrast are stored by the setters but never
// written to m_sPicture before VIDIOCSPICT — confirm whether this is
// intentional.
tFloat64 m_fBrightness;
tFloat64 m_fContrast;
// Requested capture geometry/format; -1 means "use the device default".
tInt m_nWidth;
tInt m_nHeight;
tInt m_nDepth;
tInt m_nFormat;
// Capture mode selector: 0 = auto, 1 = read, 2 = mmap.
tInt m_nCaptureMode;
// Size of one frame in bytes, computed in DeviceOpen().
tInt32 m_nImageSize;
// tTrue => use v4l1_read(); tFalse => use the mmap interface.
tBool m_bReadMode;
// Stream type describing the negotiated image format of the output pin.
ucom::object_ptr<streaming::IStreamType> m_pStreamType;
streaming::cSampleWriter m_oWriter;
// Name for the kernel thread created in DeviceStart().
adtf_util::cString m_strReceiverName;
// Clock used to timestamp outgoing samples.
ucom::object_ptr<services::IReferenceClock> m_pClock;
public:
cVideo4LinuxCapture();
virtual ~cVideo4LinuxCapture();
tVoid SetBrightness(const tFloat64& fBrightness);
tVoid SetContrast(const tFloat64& fContrast);
tVoid SetCaptureWidth(tInt nWidth);
tVoid SetCaptureHeight(tInt nHeight);
tVoid SetCaptureDepth(tInt nDepth);
tVoid SetCaptureFormat(tInt nFormat);
tVoid SetCaptureMode(tInt nMode);
// Reads one frame into pvBuffer; returns nBufferSize on success, -1 on error.
tInt Read(tVoid* pvBuffer, tInt nBufferSize);
protected:
// Opens and configures the device; fills i_sFormat from the negotiated
// palette and publishes it via m_pStreamType.
tResult DeviceOpen(const tChar* strName,
const tChar* strDevice,
streaming::tStreamImageFormat i_sFormat);
tResult DeviceClose();
tResult DeviceStart();
tResult DeviceStop();
protected: //implements cKernelThreadLoop
// Called periodically by the thread loop; captures and emits one frame.
tResult LoopFunc() override;
};
#endif // _V4L_CAPTURE_SOURCE_
Implementation for the main logic
#include "stdafx.h"
using namespace adtf;
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/mman.h>
#include <unistd.h>
#include <fcntl.h>
#include <libv4l1-videodev.h>
#include "v4lcapture.h"
#include <libv4l1.h>
__declare_statistics("Video4Linux Device");
// Initializes all members to their "not configured / not opened" defaults.
// -1 consistently means "use the device default" for geometry/format values.
cVideo4LinuxCapture::cVideo4LinuxCapture() :
    adtf::system::cKernelThreadLoop(),
    m_iDev(-1),
    m_sCapabilities(),
    m_sPicture(),
    m_sWindow(),
    m_sBuffer(),
    m_sMap(),
    m_pBuffer(nullptr),
    m_strDevice("/dev/video0"),
    m_fBrightness(-1.0),
    m_fContrast(-1.0),
    m_nWidth(-1),
    m_nHeight(-1),
    m_nDepth(-1),
    m_nFormat(-1),
    // 0 = auto-detect capture mode (try mmap, fall back to read).
    // FIX: this member was missing from the initializer list, so
    // DeviceOpen() read it uninitialized.
    m_nCaptureMode(0),
    m_nImageSize(-1),
    m_bReadMode(tTrue),
    m_pStreamType(),
    m_oWriter(),
    m_strReceiverName(),
    m_pClock()
{
}
// Intentionally empty: the device is closed explicitly through DeviceClose()
// (called from cVideo4LinuxCaptureSource::Destruct()).
cVideo4LinuxCapture::~cVideo4LinuxCapture()
{
}
// Opens the v4l1 device, negotiates resolution and palette, computes the
// frame size, publishes the resulting stream type and selects the capture
// interface (read vs. mmap).
//
// @param strName   graph object name; used to derive the thread name
// @param strDevice device node, or "default" for /dev/video0
// @param i_sFormat requested image format (by value; the palette-derived
//                  format name is filled in locally before publishing)
// @return standard ADTF result; the device is closed again on every error
//         path after a successful v4l1_open
tResult cVideo4LinuxCapture::DeviceOpen(const tChar* strName,
                                        const tChar* strDevice,
                                        streaming::tStreamImageFormat i_sFormat)
{
    using namespace adtf_util; //cString, cStringUtil

    // Name of the kernel thread created later in DeviceStart().
    m_strReceiverName = cString(strName) + "_t";
    RETURN_IF_FAILED(_runtime->GetObject(m_pClock));

    // "default" selects /dev/video0; anything else is taken verbatim.
    m_strDevice = "/dev/video0";
    if (!cStringUtil::IsEqual(strDevice, "default"))
    {
        m_strDevice = strDevice;
    }
    if ((m_iDev = v4l1_open(m_strDevice.GetPtr(), O_RDWR)) < 0)
    {
        RETURN_ERROR_DESC(ERR_UNEXPECTED,
            cString::Format("video4linux: failed to open %s", m_strDevice.GetPtr()));
    }

    // Query capabilities and the current picture/window configuration.
    if (v4l1_ioctl(m_iDev, VIDIOCGCAP, &m_sCapabilities) < 0)
    {
        DeviceClose();
        RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: unable to read capabilities");
    }
    if (v4l1_ioctl(m_iDev, VIDIOCGPICT, &m_sPicture) < 0)
    {
        DeviceClose();
        RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: unable to read picture properties");
    }
    // Write the (unmodified) picture settings back so the driver confirms them.
    if (v4l1_ioctl(m_iDev, VIDIOCSPICT, &m_sPicture) < 0)
    {
        DeviceClose();
        RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: unable to set capture image properties");
    }
    if (v4l1_ioctl(m_iDev, VIDIOCGWIN, &m_sWindow) < 0)
    {
        DeviceClose();
        RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: unable to read window properties");
    }

    // Apply a user-requested resolution if one was configured (-1 = default)
    // and the device can deliver it.
    if (m_nWidth > -1 && m_nHeight > -1)
    {
        if (m_nWidth >= m_sCapabilities.minwidth &&
            m_nWidth <= m_sCapabilities.maxwidth &&
            m_nHeight >= m_sCapabilities.minheight &&
            m_nHeight <= m_sCapabilities.maxheight)
        {
            m_sWindow.width = m_nWidth;
            m_sWindow.height = m_nHeight;
        }
        else
        {
            DeviceClose();
            RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: resolution not supported by device");
        }
    }
    if (v4l1_ioctl(m_iDev, VIDIOCSWIN, &m_sWindow) < 0)
    {
        DeviceClose();
        RETURN_ERROR_DESC(ERR_UNEXPECTED, "video4linux: unable to set capture resolution");
    }

    // Map the driver-reported v4l1 palette onto the ADTF image format name.
    switch (m_sPicture.palette)
    {
        case VIDEO_PALETTE_GREY:
            switch (m_sPicture.depth)
            {
                case 8: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(GREYSCALE_8); break;
                case 16: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(GREYSCALE_16); break;
                case 24: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(GREYSCALE_24); break;
                case 32: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(GREYSCALE_32); break;
            }
            break;
        case VIDEO_PALETTE_RGB565: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_565); break;
        case VIDEO_PALETTE_RGB24: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_24); break;
        case VIDEO_PALETTE_RGB32: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_32); break;
        case VIDEO_PALETTE_RGB555: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_555); break;
        case VIDEO_PALETTE_YUV420P: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(YUV420P); break;
        case VIDEO_PALETTE_YUYV: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(YUYV); break;
        default: i_sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_24); break;
    }

    // m_sPicture.depth is in BITS per pixel; divide by 8 to get bytes.
    // FIX: previously divided by sizeof(std::size_t), which equals 8 only on
    // 64-bit platforms and is semantically wrong regardless of platform.
    const tUInt nBytesPerLine = m_sWindow.width * m_sPicture.depth / 8;
    m_nImageSize = nBytesPerLine * m_sWindow.height;

    // Publish the negotiated format as the stream type of the output pin.
    m_pStreamType =
        ucom::make_object_ptr<streaming::cStreamType>(streaming::stream_meta_type_image());
    streaming::set_stream_type_image_format(*m_pStreamType, i_sFormat);

    // Select the capture interface: 0 = auto, 1 = read, 2 = mmap.
    m_bReadMode = tTrue;
    if (m_nCaptureMode != 1)
    {
        if (v4l1_ioctl(m_iDev, VIDIOCGMBUF, &m_sBuffer) == 0 && m_sBuffer.frames > 0)
        {
            m_bReadMode = tFalse;
            m_pBuffer = v4l1_mmap(0, m_sBuffer.size, PROT_READ, MAP_SHARED, m_iDev, 0);
            if (m_pBuffer == MAP_FAILED)
            {
                // Don't let DeviceClose() munmap an invalid pointer.
                m_pBuffer = nullptr;
                DeviceClose();
                RETURN_ERROR_DESC(ERR_DEVICE_IO, "Cannot create mmap interface. Try read mode");
            }
            m_sMap.format = m_sPicture.palette;
            m_sMap.frame = 0;
            m_sMap.width = m_sWindow.width;
            m_sMap.height = m_sWindow.height;
        }
        else if (m_nCaptureMode == 2)
        {
            // mmap was explicitly requested but the device cannot provide it.
            RETURN_ERROR_DESC(ERR_NOT_SUPPORTED, "mmap interface is not supported");
        }
    }
    if (m_bReadMode)
    {
        LOG_INFO("video4linux: Using read interface");
    }
    else
    {
        LOG_INFO("video4linux: Using mmap interface");
    }
    RETURN_NOERROR;
}
// Starts streaming: queues the first frame (mmap mode only) and spins up the
// kernel thread loop that pumps frames in LoopFunc().
tResult cVideo4LinuxCapture::DeviceStart()
{
    if (m_iDev < 0)
    {
        RETURN_ERROR(ERR_NOT_READY);
    }
    // FIX: queue the initial frame only when using the mmap interface. In
    // read mode m_sMap is unused (zero-initialized) and VIDIOCMCAPTURE would
    // fail, aborting the start although v4l1_read works fine.
    if (!m_bReadMode)
    {
        if (v4l1_ioctl(m_iDev, VIDIOCMCAPTURE, &m_sMap) < 0)
        {
            RETURN_ERROR_DESC(ERR_DEVICE_IO, "Unable to capture next frame: VIDIOCMCAPTURE");
        }
    }
    RETURN_IF_FAILED(Create(m_strReceiverName.GetPtr()));
    RETURN_IF_FAILED(SetState(tLoopState::Running));
    RETURN_NOERROR;
}
// Stops the capture thread: tears down the loop state, then releases the
// kernel thread created in DeviceStart().
tResult cVideo4LinuxCapture::DeviceStop()
{
SetState(tLoopState::Destroyed);
return Release();
}
// Releases the mmap buffer (if any) and closes the device. Safe to call
// multiple times; a second call is a no-op because m_iDev is reset to -1.
tResult cVideo4LinuxCapture::DeviceClose()
{
    if (m_iDev >= 0)
    {
        if (!m_bReadMode && m_pBuffer != nullptr)
        {
            v4l1_munmap(m_pBuffer, m_sBuffer.size);
            // FIX: reset the pointer so a later close/reopen cycle cannot
            // munmap a stale mapping.
            m_pBuffer = nullptr;
        }
        v4l1_close(m_iDev);
        m_iDev = -1;
    }
    RETURN_NOERROR;
}
// Reads one frame into pvBuffer.
//
// In read mode a plain v4l1_read is performed; in mmap mode the pending
// frame is synchronized (VIDIOCSYNC), copied out of the mapped buffer and
// the next capture is queued (VIDIOCMCAPTURE). The ioctl order matters and
// must not be changed.
//
// @param pvBuffer    destination buffer, at least nBufferSize bytes
// @param nBufferSize expected frame size in bytes (m_nImageSize)
// @return nBufferSize on success, -1 on any failure
//
// NOTE(review): the mmap path copies from the start of m_pBuffer; this
// assumes frame 0's offset in the mbuf is 0 — verify against
// m_sBuffer.offsets[0] for drivers where that does not hold.
tInt cVideo4LinuxCapture::Read(tVoid* pvBuffer, tInt nBufferSize)
{
if (m_iDev < 0)
return -1;
if (m_bReadMode)
{
// A short read is treated as an error: a full frame is expected.
if (nBufferSize != v4l1_read(m_iDev, pvBuffer, nBufferSize))
{
LOG_ERROR("Unable to read from device");
return -1;
}
}
else
{
// Wait until the previously queued frame has been fully captured.
if (v4l1_ioctl(m_iDev, VIDIOCSYNC, &m_sMap.frame) < 0)
{
LOG_ERROR("Unable to sync to next frame: VIDIOCSYNC");
return -1;
}
adtf_util::cMemoryBlock::MemCopy(pvBuffer, m_pBuffer, nBufferSize);
// Immediately queue the next capture so it runs while the caller
// processes this frame.
if (v4l1_ioctl(m_iDev, VIDIOCMCAPTURE, &m_sMap) < 0)
{
LOG_ERROR("Unable to capture next frame: VIDIOCMCAPTURE");
return -1;
}
}
return nBufferSize;
}
// Setters
// Stores the desired brightness (-1 = device default).
// NOTE(review): the stored value is never written back to the device in
// this file — confirm whether application is still pending.
tVoid cVideo4LinuxCapture::SetBrightness(const tFloat64 & fBrightness)
{
m_fBrightness = fBrightness;
}
// Stores the desired contrast (-1 = device default); see note above on
// SetBrightness regarding application to the device.
tVoid cVideo4LinuxCapture::SetContrast(const tFloat64 & fContrast)
{
m_fContrast = fContrast;
}
// Requested capture width in pixels; -1 keeps the device default.
tVoid cVideo4LinuxCapture::SetCaptureWidth(tInt nWidth)
{
m_nWidth = nWidth;
}
// Requested capture height in pixels; -1 keeps the device default.
tVoid cVideo4LinuxCapture::SetCaptureHeight(tInt nHeight)
{
m_nHeight = nHeight;
}
// Requested color depth; -1 keeps the device default.
tVoid cVideo4LinuxCapture::SetCaptureDepth(tInt nDepth)
{
m_nDepth = nDepth;
}
// Requested v4l1 palette id; -1 keeps the device default.
tVoid cVideo4LinuxCapture::SetCaptureFormat(tInt nFormat)
{
m_nFormat = nFormat;
}
// Capture interface selection: 0 = auto, 1 = read, 2 = mmap
// (evaluated in DeviceOpen).
tVoid cVideo4LinuxCapture::SetCaptureMode(tInt nMode)
{
m_nCaptureMode = nMode;
}
// One iteration of the capture thread: read a frame from the device, wrap it
// in a sample stamped with the current stream time and push it downstream.
// Errors are forwarded through the writer instead of stopping the loop.
tResult cVideo4LinuxCapture::LoopFunc()
{
    using namespace streaming; //flush, trigger, ISample

    // Per-iteration scratch buffer sized for one frame (see DeviceOpen()).
    std::vector<tChar> pBuffer(m_nImageSize, 0);
    const auto nBytesRead = Read(pBuffer.data(), pBuffer.size());
    if (-1 != nBytesRead)
    {
        ucom::object_ptr<ISample> pSample;
        tResult nResult = alloc_sample(pSample);
        if (IS_FAILED(nResult))
        {
            // FIX: forward the error and skip this iteration. Previously the
            // code fell through and dereferenced the null sample pointer.
            m_oWriter << nResult << flush << trigger;
            RETURN_NOERROR;
        }
        nResult = pSample->Set(m_pClock->GetStreamTime(), pBuffer.data(), nBytesRead);
        if (IS_FAILED(nResult))
        {
            m_oWriter << nResult << flush << trigger;
        }
        else
        {
            m_oWriter << pSample << flush << trigger;
        }
    }
    RETURN_NOERROR;
}
ADTF - Namespace.
Header for the Streaming Source
#ifndef _V4L_INPUT_FILTER_HEADER_
#define _V4L_INPUT_FILTER_HEADER_
// ADTF streaming source that exposes a Video4Linux camera as a single image
// output pin. The actual device handling lives in cVideo4LinuxCapture.
class cVideo4LinuxCaptureSource : public streaming::cSampleStreamingSource
{
public:
ADTF_CLASS_ID_NAME(cVideo4LinuxCaptureSource,
"demo_video4linux_capture_device.streaming_source.devicetb.cid",
"Video4Linux Capture Device");
public:
cVideo4LinuxCaptureSource();
tResult Construct() override;
tResult Destruct() override;
tResult StartStreaming() override;
tResult StopStreaming() override;
protected:
// Device worker; its writer/stream type are wired to the "output" pin.
cVideo4LinuxCapture m_oCamera;
private:
// User-facing properties; "default" maps to /dev/video0.
base::property_variable<adtf_util::cString> m_strDeviceName = adtf_util::cString("default");
// NOTE(review): registered as a property but not evaluated anywhere in
// this file — confirm intended use.
base::property_variable<tUInt32> m_ui32FrameRate = 20;
base::property_variable<tInt32> m_nHeight = 480;
base::property_variable<tInt32> m_nWidth = 640;
};//class cVideo4LinuxCaptureSource
#endif // _V4L_INPUT_FILTER_HEADER_
Implementation for the Streaming Source
#include "stdafx.h"
using namespace adtf;
#include <libv4l1-videodev.h>
#include "v4lcapture.h"
#include "v4lfilter.h"
ADTF_PLUGIN_VERSION("Video4Linux Capture Device Plugin",
devicetb,
DEVICETB_VERSION_MAJOR,
DEVICETB_VERSION_MINOR,
DEVICETB_VERSION_PATCH,
cVideo4LinuxCaptureSource)
// Registers the user properties and the component description.
// FIX: the framerate/width/height descriptions were mistakenly set on
// m_strDeviceName (copy/paste), overwriting the device description and
// leaving the other properties undocumented.
cVideo4LinuxCaptureSource::cVideo4LinuxCaptureSource()
{
    m_strDeviceName.SetDescription("The name of the device. You can also address the device by ID, use device:<ID>.");
    RegisterPropertyVariable("device", m_strDeviceName);
    m_ui32FrameRate.SetDescription("Specifies the frames per second.");
    RegisterPropertyVariable("framerate", m_ui32FrameRate);
    m_nWidth.SetDescription("Sets the width for resolution.");
    RegisterPropertyVariable("width", m_nWidth);
    m_nHeight.SetDescription("Sets the height for resolution.");
    RegisterPropertyVariable("height", m_nHeight);
    SetDescription("Use this Streaming Source to receive data from a video device compatible with Video4Linux API.");
    SetHelpLink("$(ADTF_DEVICE_TOOLBOX_DIR)/doc/adtf_device_toolbox_html/page_example_video4linux_capture_device.html");
    // Legacy (ADTF 2.x style) property setup kept for reference:
    // SetPropertyFloat("brightness", -1.0);
    // SetPropertyStr("brightness" NSSUBPROP_DESCRIPTION,
    //                "Brightness of output video (-1,0..1).");
    // SetPropertyFloat("contrast", -1.0);
    // SetPropertyStr("contrast" NSSUBPROP_DESCRIPTION,
    //                "Contrast of output video (-1,0..1).");
    // SetPropertyInt("capturewidth", -1);
    // SetPropertyStr("capturewidth" NSSUBPROP_DESCRIPTION,
    //                "Horizontal recording resolution.");
    // SetPropertyInt("captureheight", -1);
    // SetPropertyStr("captureheight" NSSUBPROP_DESCRIPTION,
    //                "Vertical recording resolution.");
    // SetPropertyInt("depth", -1);
    // SetPropertyStr("depth" NSSUBPROP_DESCRIPTION,
    //                "Color depth of output video (-1,0..1).");
    // SetPropertyInt("format", -1);
    // // the following is taken from videodev.h
    // SetPropertyStr("format" NSSUBPROP_VALUELIST,
    //                "-1@default|"
    //                "1@GREY|"
    //                "2@HI240|"
    //                "3@RGB565|"
    //                "4@RGB24|"
    //                "5@RGB32|"
    //                "6@RGB555|"
    //                "7@YUV422|"
    //                "8@YUYV|"
    //                "9@UYVY|"
    //                "10@YUV420|"
    //                "11@YUV411|"
    //                "12@RAW|"
    //                "13@YUV422P|"
    //                "14@YUV411P|"
    //                "15@YUV420P|"
    //                "16@YUV410P");
    // SetPropertyStr("format" NSSUBPROP_DESCRIPTION,
    //                "Color format of output video.");
    // SetPropertyInt("capturemode", 0);
    // SetPropertyStr("capturemode" NSSUBPROP_VALUELIST,
    //                "0@auto|1@read|2@mmap");
    // SetPropertyStr("capturemode" NSSUBPROP_DESCRIPTION,
    //                "Capture mode.");
    // // remove properties not used
    // ClearProperty("AutoExposeControl");
    // ClearProperty("AutoGainControl");
    // ClearProperty("updaterate");
    // ClearProperty("UserConfig");
    // ClearProperty("BytesPerLine");
    // ClearProperty("PaletteSize");
    // ClearProperty("SyncEnable");
    // ClearProperty("RequestTimeout");
    // ClearProperty("NumBuffers");
    // m_pDevice = &m_oCamera;
}
// Opens the camera with the configured properties and creates the output pin
// using the stream type negotiated by DeviceOpen().
tResult cVideo4LinuxCaptureSource::Construct()
{
    RETURN_IF_FAILED(streaming::cSampleStreamingSource::Construct());

    // Seed the requested image format from the user properties.
    tInternalFormat sFormat{};
    sFormat.m_ui32Width = m_nWidth;
    sFormat.m_ui32Height = m_nHeight;
    sFormat.m_strFormatName = ADTF_IMAGE_FORMAT(RGB_24);

    // FIX: forward the configured resolution to the capture device so that
    // DeviceOpen() actually applies it via VIDIOCSWIN; previously the
    // width/height properties were registered but never used.
    m_oCamera.SetCaptureWidth(m_nWidth);
    m_oCamera.SetCaptureHeight(m_nHeight);

    const adtf_util::cString strDev = m_strDeviceName;
    const adtf_util::cString strName = get_named_graph_object_full_name(*this);
    RETURN_IF_FAILED(m_oCamera.DeviceOpen(strName, strDev, sFormat));
    RETURN_IF_FAILED(create_pin(*this, m_oCamera.m_oWriter, "output", m_oCamera.m_pStreamType));
    RETURN_NOERROR;
}
// Closes the device before handing destruction over to the base class.
tResult cVideo4LinuxCaptureSource::Destruct()
{
m_oCamera.DeviceClose();
return cSampleStreamingSource::Destruct();
}
// Starts the base class first, then the camera capture thread.
tResult cVideo4LinuxCaptureSource::StartStreaming()
{
RETURN_IF_FAILED(cSampleStreamingSource::StartStreaming());
return m_oCamera.DeviceStart();
}
// Stops the capture thread first (reverse order of StartStreaming), then
// the base class.
tResult cVideo4LinuxCaptureSource::StopStreaming()
{
m_oCamera.DeviceStop();
return cSampleStreamingSource::StopStreaming();
}