Diffstat (limited to 'kopete/libkopete/avdevice')
-rw-r--r--  kopete/libkopete/avdevice/Makefile.am              |   18
-rw-r--r--  kopete/libkopete/avdevice/bayer.cpp                |  118
-rw-r--r--  kopete/libkopete/avdevice/bayer.h                  |   30
-rw-r--r--  kopete/libkopete/avdevice/kxv.cpp                  |  711
-rw-r--r--  kopete/libkopete/avdevice/kxv.h                    |  260
-rw-r--r--  kopete/libkopete/avdevice/qvideo.cpp               |  154
-rw-r--r--  kopete/libkopete/avdevice/qvideo.h                 |   68
-rw-r--r--  kopete/libkopete/avdevice/qvideostream.cpp         |  731
-rw-r--r--  kopete/libkopete/avdevice/qvideostream.h           |  112
-rw-r--r--  kopete/libkopete/avdevice/sonix_compress.cpp       |  180
-rw-r--r--  kopete/libkopete/avdevice/sonix_compress.h         |    8
-rw-r--r--  kopete/libkopete/avdevice/videocontrol.cpp         |   36
-rw-r--r--  kopete/libkopete/avdevice/videocontrol.h           |   82
-rw-r--r--  kopete/libkopete/avdevice/videodevice.cpp          | 2752
-rw-r--r--  kopete/libkopete/avdevice/videodevice.h            |  333
-rw-r--r--  kopete/libkopete/avdevice/videodevicemodelpool.cpp |   68
-rw-r--r--  kopete/libkopete/avdevice/videodevicemodelpool.h   |   53
-rw-r--r--  kopete/libkopete/avdevice/videodevicepool.cpp      |  889
-rw-r--r--  kopete/libkopete/avdevice/videodevicepool.h        |  127
-rw-r--r--  kopete/libkopete/avdevice/videoinput.cpp           |  172
-rw-r--r--  kopete/libkopete/avdevice/videoinput.h             |   89
21 files changed, 6991 insertions(+), 0 deletions(-)
diff --git a/kopete/libkopete/avdevice/Makefile.am b/kopete/libkopete/avdevice/Makefile.am
new file mode 100644
index 00000000..a234f797
--- /dev/null
+++ b/kopete/libkopete/avdevice/Makefile.am
@@ -0,0 +1,18 @@
+INCLUDES = $(GLINC) $(all_includes)
+AM_CPPFLAGS = -DKDE_NO_COMPAT -DQT_NO_COMPAT -DQT_NO_CAST_ASCII -DQT_NO_ASCII_CAST \
+ $(KOPETE_INCLUDES) -I$(top_srcdir)/kopete/libkopete/private \
+ -I$(top_srcdir)/kopete/libkopete/ui $(all_includes)
+METASOURCES = AUTO
+lib_LTLIBRARIES = libkopete_videodevice.la
+noinst_LTLIBRARIES = libkvideoio.la
+libkopete_videodevice_la_LDFLAGS = $(KDE_RPATH) $(all_libraries)
+
+noinst_HEADERS = kxv.h qvideo.h qvideostream.h videocontrol.h videodevice.h \
+ videodevicemodelpool.h videodevicepool.h videoinput.h \
+ sonix_compress.h bayer.h
+libkopete_videodevice_la_SOURCES = videocontrol.cpp videodevice.cpp \
+ videodevicemodelpool.cpp videodevicepool.cpp videoinput.cpp \
+ sonix_compress.cpp bayer.cpp
+libkvideoio_la_LDFLAGS = -no-undefined $(all_libraries) -version-info 1:0:0
+libkvideoio_la_SOURCES = kxv.cpp qvideo.cpp qvideostream.cpp
+libkvideoio_la_LIBADD = $(LIB_QT) $(LIB_KDECORE) $(GLLIB)
diff --git a/kopete/libkopete/avdevice/bayer.cpp b/kopete/libkopete/avdevice/bayer.cpp
new file mode 100644
index 00000000..69189bae
--- /dev/null
+++ b/kopete/libkopete/avdevice/bayer.cpp
@@ -0,0 +1,118 @@
+/*
+ * BAYER2RGB24 ROUTINE TAKEN FROM:
+ *
+ * Sonix SN9C101 based webcam basic I/F routines
+ * Copyright (C) 2004 Takafumi Mizuno <[email protected]>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+void bayer2rgb24(unsigned char *dst, unsigned char *src, long int WIDTH, long int HEIGHT)
+{
+ long int i;
+ unsigned char *rawpt, *scanpt;
+ long int size;
+
+ rawpt = src;
+ scanpt = dst;
+ size = WIDTH*HEIGHT;
+
+ for ( i = 0; i < size; i++ )
+ {
+ if ( (i/WIDTH) % 2 == 0 )
+ {
+ if ( (i % 2) == 0 )
+ {
+ // B
+ if ( (i > WIDTH) && ((i % WIDTH) > 0) )
+ {
+ *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; // R
+ *scanpt++ = (*(rawpt-1)+*(rawpt+1)+*(rawpt+WIDTH)+*(rawpt-WIDTH))/4; // G
+ *scanpt++ = *rawpt; // B
+ }
+ else
+ {
+ // first line or left column
+ *scanpt++ = *(rawpt+WIDTH+1); // R
+ *scanpt++ = (*(rawpt+1)+*(rawpt+WIDTH))/2; // G
+ *scanpt++ = *rawpt; // B
+ }
+ }
+ else
+ {
+ // (B)G
+ if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) )
+ {
+ *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; // R
+ *scanpt++ = *rawpt; // G
+ *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; // B
+ }
+ else
+ {
+ // first line or right column
+ *scanpt++ = *(rawpt+WIDTH); // R
+ *scanpt++ = *rawpt; // G
+ *scanpt++ = *(rawpt-1); // B
+ }
+ }
+ }
+ else
+ {
+ if ( (i % 2) == 0 )
+ {
+ // G(R)
+ if ( (i < (WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) )
+ {
+ *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; // R
+ *scanpt++ = *rawpt; // G
+ *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; // B
+ }
+ else
+ {
+ // bottom line or left column
+ *scanpt++ = *(rawpt+1); /* R */
+ *scanpt++ = *rawpt; /* G */
+ *scanpt++ = *(rawpt-WIDTH); /* B */
+ }
+ }
+ else
+ {
+ // R
+ if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) )
+ {
+ *scanpt++ = *rawpt; // R
+ *scanpt++ = (*(rawpt-1)+*(rawpt+1)+*(rawpt-WIDTH)+*(rawpt+WIDTH))/4; // G
+ *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; // B
+ }
+ else
+ {
+ // bottom line or right column
+ *scanpt++ = *rawpt; /* R */
+ *scanpt++ = (*(rawpt-1)+*(rawpt-WIDTH))/2; /* G */
+ *scanpt++ = *(rawpt-WIDTH-1); /* B */
+ }
+ }
+ }
+ rawpt++;
+ }
+}
+
diff --git a/kopete/libkopete/avdevice/bayer.h b/kopete/libkopete/avdevice/bayer.h
new file mode 100644
index 00000000..af6d8baf
--- /dev/null
+++ b/kopete/libkopete/avdevice/bayer.h
@@ -0,0 +1,30 @@
+/*
+ * BAYER2RGB24 ROUTINE TAKEN FROM:
+ *
+ * Sonix SN9C101 based webcam basic I/F routines
+ * Copyright (C) 2004 Takafumi Mizuno <[email protected]>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+
+void bayer2rgb24 (unsigned char *dst, unsigned char *src, long int WIDTH, long int HEIGHT);
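A minimal calling sketch for this helper (the 320x240 frame size and buffer names below are illustrative placeholders, not taken from the code):

    // Expand one-byte-per-pixel Bayer data into packed RGB24 (3 bytes per pixel).
    const long int w = 320, h = 240;                      // assumed capture size
    unsigned char *bayerFrame = new unsigned char[w * h];
    unsigned char *rgbFrame = new unsigned char[w * h * 3];
    // ... fill bayerFrame from the capture device ...
    bayer2rgb24(rgbFrame, bayerFrame, w, h);              // dst, src, width, height
    // rgbFrame now holds one R,G,B triplet per source pixel.
    delete[] bayerFrame;
    delete[] rgbFrame;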
diff --git a/kopete/libkopete/avdevice/kxv.cpp b/kopete/libkopete/avdevice/kxv.cpp
new file mode 100644
index 00000000..661bdfad
--- /dev/null
+++ b/kopete/libkopete/avdevice/kxv.cpp
@@ -0,0 +1,711 @@
+/*
+ * KDE Xv interface
+ *
+ * Copyright (C) 2001 George Staikos ([email protected])
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <assert.h>
+
+#include <qwindowdefs.h>
+#include <qwidget.h>
+
+#include <kdebug.h>
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "kxv.h"
+
+
+#include <X11/X.h>
+#include <X11/Xlib.h>
+#include <X11/StringDefs.h>
+#include <X11/Xatom.h>
+#ifdef HAVE_XSHM
+extern "C" {
+#include <sys/shm.h>
+#include <X11/extensions/XShm.h>
+}
+#endif
+
+#ifdef HAVE_LIBXV
+#include <X11/extensions/Xv.h>
+#include <X11/extensions/Xvlib.h>
+#endif
+
+#ifdef HAVE_LIBXVMC
+#include <X11/extensions/XvMC.h>
+#include <X11/extensions/XvMClib.h>
+#endif
+
+
+KXv::KXv()
+{
+ xv_adaptors = 0;
+ _devs.setAutoDelete(true);
+}
+
+
+KXv::~KXv()
+{
+ kdDebug() << "KXv::~KXv: Close Xv connection." << endl;
+ _devs.clear();
+
+#ifdef HAVE_LIBXV
+ if (xv_adaptors > 0)
+ XvFreeAdaptorInfo((XvAdaptorInfo *)xv_adaptor_info);
+#endif
+}
+
+
+KXvDeviceList& KXv::devices()
+{
+ return _devs;
+}
+
+
+bool KXv::haveXv()
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ unsigned int tmp;
+ if (Success != XvQueryExtension(qt_xdisplay(),
+ &tmp,
+ &tmp,
+ &tmp,
+ &tmp,
+ &tmp))
+ return false;
+
+ return true;
+#endif
+}
+
+
+KXv* KXv::connect(Drawable d)
+{
+ KXv *xvptr;
+
+ xvptr = new KXv;
+ if (!xvptr->init(d)) {
+ kdDebug() << "KXv::connect: Xv init failed." << endl;
+ delete xvptr;
+ return NULL;
+ }
+
+ kdDebug() << "KXv::connect: Xv init completed." << endl;
+ return xvptr;
+}
+
+
+bool KXv::init(Drawable d)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ if (Success != XvQueryExtension(qt_xdisplay(),
+ &xv_version,
+ &xv_release,
+ &xv_request,
+ &xv_event,
+ &xv_error)) {
+ kdWarning() << "KXv::init: Xv extension not available." << endl;
+ return false;
+ }
+
+#ifdef HAVE_LIBXVMC
+ // Causes crashes for some people.
+ // if (Success == XvMCQueryExtension(qt_xdisplay(),0,0)) {
+ // kdDebug() << "Found XvMC!" << endl;
+ // }
+#endif
+
+ if (Success != XvQueryAdaptors(qt_xdisplay(),
+ d,
+ &xv_adaptors,
+ (XvAdaptorInfo **)&xv_adaptor_info)) {
+ // Not technically fatal... what to do?
+ kdWarning() << "KXv::init: XvQueryAdaptors failed." << endl;
+ }
+
+ XvAdaptorInfo *ai = (XvAdaptorInfo *)xv_adaptor_info;
+
+ for (unsigned int i = 0; i < xv_adaptors; i++) {
+ KXvDevice *xvd = new KXvDevice;
+ xvd->xv_type = ai[i].type;
+ xvd->xv_port = ai[i].base_id;
+ xvd->xv_name = ai[i].name;
+ xvd->xv_adaptor = i;
+ xvd->xv_nvisualformats = ai[i].num_formats;
+ xvd->xv_visualformats = ai[i].formats;
+ if (ai[i].type & XvInputMask &&
+ ai[i].type & XvVideoMask ) {
+ kdDebug() << "KXv::init: Xv VideoMask port " << ai[i].base_id << " was found."
+ << " Device is: " << ai[i].name << "." << endl;
+ }
+ if (ai[i].type & XvInputMask &&
+ ai[i].type & XvImageMask ) {
+ kdDebug() << "KXv::init: Xv ImageMask port " << ai[i].base_id << " was found."
+ << " Device is: " << ai[i].name << "." << endl;
+ }
+
+ if (xvd->init()) {
+ _devs.append(xvd);
+ } else {
+ delete xvd;
+ }
+ }
+
+ return true;
+#endif
+}
+
+bool KXvDevice::grabStill(QImage* /*pix*/, int /*dw*/, int /*dh*/)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ return false;
+#endif
+}
+
+int KXvDevice::displayImage(QWidget *widget, const unsigned char *const data, int w, int h, int dw, int dh)
+{
+ if (!widget)
+ return -1;
+ return displayImage(widget->winId(), data, w, h, 0, 0, w, h, dw, dh);
+}
+
+int KXvDevice::displayImage(QWidget *widget, const unsigned char *const data, int w, int h, int x, int y, int sw, int sh, int dw, int dh)
+{
+ if (!widget)
+ return -1;
+ return displayImage(widget->winId(), data, w, h, x, y, sw, sh, dw, dh);
+}
+
+int KXvDevice::displayImage(Window win, const unsigned char *const data, int w, int h, int dw, int dh)
+{
+ return displayImage(win, data, w, h, 0, 0, w, h, dw, dh);
+}
+
+int KXvDevice::displayImage(Window win, const unsigned char *const data, int w, int h, int x, int y, int sw, int sh, int dw, int dh)
+{
+#ifndef HAVE_LIBXV
+ return -1;
+#else
+ Q_ASSERT(xv_port != -1);
+
+ // Must be a video capable device!
+ if (!(xv_type & XvImageMask) || !(xv_type & XvInputMask)) {
+ kdWarning() << "KXvDevice::displayImage: This is not a video capable device." << endl;
+ return -1;
+ }
+
+ if (xv_image_w != w || xv_image_h != h || !xv_image)
+ rebuildImage(w, h, _shm);
+
+ if (!xv_image)
+ return -1;
+
+ if (win != xv_last_win && xv_gc) {
+ XFreeGC(qt_xdisplay(), xv_gc);
+ xv_gc = 0;
+ }
+
+ if (!xv_gc) {
+ xv_last_win = win;
+ xv_gc = XCreateGC(qt_xdisplay(), win, 0, NULL);
+ }
+
+ int rc = 0;
+ Q_ASSERT(xv_image);
+ if (!_shm) {
+ static_cast<XvImage*>(xv_image)->data =
+ (char *)const_cast<unsigned char*>(data);
+ rc = XvPutImage(qt_xdisplay(), xv_port, win, xv_gc,
+ static_cast<XvImage*>(xv_image), x, y, sw, sh, 0, 0, dw, dh);
+ } else {
+#ifdef HAVE_XSHM
+ memcpy(static_cast<XvImage*>(xv_image)->data, data, static_cast<XvImage*>(xv_image)->data_size);
+ rc = XvShmPutImage(qt_xdisplay(), xv_port, win, xv_gc,
+ static_cast<XvImage*>(xv_image), x, y, sw, sh, 0, 0, dw, dh, 0);
+#endif
+ }
+
+ XSync(qt_xdisplay(), False);
+ return rc;
+#endif
+}
+
+
+bool KXvDevice::startVideo(QWidget *w, int dw, int dh)
+{
+ if (!w) return false;
+ return startVideo(w->winId(), dw, dh);
+}
+
+
+bool KXvDevice::startVideo(Window w, int dw, int dh)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ int sx = 0, sy = 0, dx = 0, dy = 0, sw = dw, sh = dh;
+
+ // Must be a video capable device!
+ if (!(xv_type & XvVideoMask) || !(xv_type & XvInputMask)) {
+ kdWarning() << "KXvDevice::startVideo: This is not a video capable device." << endl;
+ return false;
+ }
+
+ if (videoStarted) stopVideo();
+
+ if (xv_port == -1) {
+ kdWarning() << "KXvDevice::startVideo: No xv_port." << endl;
+ return false;
+ }
+
+ if (w != xv_last_win && xv_gc) {
+ XFreeGC(qt_xdisplay(), xv_gc);
+ xv_gc = 0;
+ }
+
+ if (!xv_gc) {
+ xv_last_win = w;
+ xv_gc = XCreateGC(qt_xdisplay(), w, 0, NULL);
+ }
+
+ if (-1 != xv_encoding) {
+ sw = ((XvEncodingInfo *)xv_encoding_info)[xv_encoding].width;
+ sh = ((XvEncodingInfo *)xv_encoding_info)[xv_encoding].height;
+ }
+
+ // xawtv does this here:
+ // ng_ratio_fixup(&dw, &dh, &dx, &dy);
+
+ kdDebug() << "XvPutVideo: " << qt_xdisplay()
+ << " " << xv_port << " " << w << " " << xv_gc
+ << " " << sx << " " << sy << " " << sw << " " << sh
+ << " " << dx << " " << dy << " " << dw << " " << dh << endl;
+ XvPutVideo(qt_xdisplay(), xv_port, w, xv_gc, sx, sy, sw, sh, dx, dy, dw, dh);
+
+ videoStarted = true;
+ videoWindow = w;
+ return true;
+#endif
+}
+
+bool KXvDevice::stopVideo()
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ if (!videoStarted)
+ return true;
+ if (xv_port == -1) {
+ kdWarning() << "KXvDevice::stopVideo: No xv_port." << endl;
+ return false;
+ }
+
+ XvStopVideo(qt_xdisplay(), xv_port, videoWindow);
+ videoStarted = false;
+ return true;
+#endif
+}
+
+
+KXvDevice::KXvDevice()
+{
+ xv_encoding_info = NULL;
+ xv_formatvalues = NULL;
+ xv_attr = NULL;
+ xv_port = -1;
+ xv_encoding = -1;
+ xv_name = QString::null;
+ xv_type = -1;
+ xv_adaptor = -1;
+ _shm = false;
+#ifdef HAVE_LIBXV
+ xv_imageformat = 0x32595559; // FIXME (YUY2)
+#ifdef HAVE_XSHM
+ if (!XShmQueryExtension(qt_xdisplay())) {
+ _haveShm = false;
+ } else {
+ _shm = true;
+ _haveShm = true;
+ }
+ xv_shminfo = new XShmSegmentInfo;
+#else
+ xv_shminfo = 0;
+#endif
+#endif
+ xv_gc = 0;
+ xv_last_win = 0;
+ videoStarted = false;
+ _attrs.setAutoDelete(true);
+ xv_image = 0;
+ xv_image_w = 320;
+ xv_image_h = 200;
+}
+
+
+KXvDevice::~KXvDevice()
+{
+#ifdef HAVE_LIBXV
+ _attrs.clear();
+ if (videoStarted) stopVideo();
+ if (xv_encoding_info)
+ XvFreeEncodingInfo((XvEncodingInfo *)xv_encoding_info);
+ XFree(xv_formatvalues);
+ XFree(xv_attr);
+#ifdef HAVE_XSHM
+ delete (XShmSegmentInfo*)xv_shminfo;
+#endif
+ destroyImage();
+#endif
+ if (xv_gc)
+ XFreeGC(qt_xdisplay(), xv_gc);
+
+#ifdef HAVE_LIBXV
+ if (xv_port != -1)
+ XvUngrabPort(qt_xdisplay(), xv_port, CurrentTime);
+#endif
+}
+
+
+bool KXvDevice::init()
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ assert(xv_port != -1); // make sure we were prepped by KXv already.
+
+ if (XvGrabPort(qt_xdisplay(), xv_port, CurrentTime)) {
+ kdWarning() << "KXvDevice::init(): Unable to grab Xv port." << endl;
+ return false;
+ }
+
+ if (Success != XvQueryEncodings(qt_xdisplay(),
+ xv_port,
+ &xv_encodings,
+ (XvEncodingInfo **)&xv_encoding_info)) {
+ kdWarning() << "KXvDevice::init: Xv QueryEncodings failed. Dropping Xv support for this device." << endl;
+ return false;
+ }
+
+ // Package the encodings up nicely
+ for (unsigned int i = 0; i < xv_encodings; i++) {
+ //kdDebug() << "Added encoding: " << ((XvEncodingInfo *)xv_encoding_info)[i].name << endl;
+ _encodingList << ((XvEncodingInfo *)xv_encoding_info)[i].name;
+ }
+
+ xv_attr = XvQueryPortAttributes(qt_xdisplay(),
+ xv_port,
+ &xv_encoding_attributes);
+ XvAttribute *xvattr = (XvAttribute *)xv_attr;
+ kdDebug() << "Attributes for port " << xv_port << endl;
+ for (int i = 0; i < xv_encoding_attributes; i++) {
+ assert(xvattr);
+ kdDebug() << " -> " << xvattr[i].name
+ << ((xvattr[i].flags & XvGettable) ? " get" : "")
+ << ((xvattr[i].flags & XvSettable) ? " set" : "")
+ << " Range: " << xvattr[i].min_value
+ << " -> " << xvattr[i].max_value << endl;
+
+ KXvDeviceAttribute *xvda = new KXvDeviceAttribute;
+ xvda->name = xvattr[i].name;
+ xvda->min = xvattr[i].min_value;
+ xvda->max = xvattr[i].max_value;
+ xvda->flags = xvattr[i].flags;
+ _attrs.append(xvda);
+ }
+
+ XvImageFormatValues *fo;
+ fo = XvListImageFormats(qt_xdisplay(), xv_port, &xv_formats);
+ xv_formatvalues = (void *)fo;
+ kdDebug() << "Image formats for port " << xv_port << endl;
+ for (int i = 0; i < xv_formats; i++) {
+ assert(fo);
+ QString imout;
+ imout.sprintf(" 0x%x (%c%c%c%c) %s",
+ fo[i].id,
+ fo[i].id & 0xff,
+ (fo[i].id >> 8) & 0xff,
+ (fo[i].id >> 16) & 0xff,
+ (fo[i].id >> 24) & 0xff,
+ ((fo[i].format == XvPacked) ?
+ "Packed" : "Planar"));
+ kdDebug() << imout << endl;
+ }
+
+ kdDebug() << "Disabling double buffering." << endl;
+ setAttribute("XV_DOUBLE_BUFFER", 0);
+
+ return true;
+#endif
+}
+
+
+bool KXvDevice::supportsWidget(QWidget *w)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ for (int i = 0; i < xv_nvisualformats; i++) {
+ if (static_cast<XvFormat*>(xv_visualformats)[i].visual_id
+ == static_cast<Visual*>(w->x11Visual())->visualid) {
+ return true;
+ }
+ }
+ return false;
+#endif
+}
+
+
+bool KXvDevice::isVideoSource()
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ if (xv_type & XvVideoMask && xv_type & XvInputMask)
+ return true;
+ return false;
+#endif
+}
+
+
+bool KXvDevice::isImageBackend()
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ if (xv_type & XvImageMask && xv_type & XvInputMask)
+ return true;
+ return false;
+#endif
+}
+
+
+const KXvDeviceAttributes& KXvDevice::attributes()
+{
+ return _attrs;
+}
+
+
+bool KXvDevice::getAttributeRange(const QString& attribute, int *min, int *max)
+{
+ for (KXvDeviceAttribute *at = _attrs.first(); at != NULL; at = _attrs.next()) {
+ if (at->name == attribute) {
+ if (min) *min = at->min;
+ if (max) *max = at->max;
+ return true;
+ }
+ }
+ return false;
+}
+
+
+bool KXvDevice::getAttribute(const QString& attribute, int *val)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ for (KXvDeviceAttribute *at = _attrs.first(); at != NULL; at = _attrs.next()) {
+ if (at->name == attribute) {
+ if (val)
+ XvGetPortAttribute(qt_xdisplay(), xv_port, at->atom(), val);
+ return true;
+ }
+ }
+ return false;
+#endif
+}
+
+
+bool KXvDevice::setAttribute(const QString& attribute, int val)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ for (KXvDeviceAttribute *at = _attrs.first(); at != NULL; at = _attrs.next()) {
+ if (at->name == attribute) {
+ XvSetPortAttribute(qt_xdisplay(), xv_port, at->atom(), val);
+ XSync(qt_xdisplay(), False);
+ return true;
+ }
+ }
+ return false;
+#endif
+}
+
+
+const QString& KXvDevice::name() const
+{
+ return xv_name;
+}
+
+
+int KXvDevice::port() const
+{
+ return xv_port;
+}
+
+const QStringList& KXvDevice::encodings() const
+{
+ return _encodingList;
+}
+
+bool KXvDevice::encoding(QString& encoding)
+{
+#ifndef HAVE_LIBXV
+ return false;
+#else
+ XvEncodingID enc;
+
+ for (KXvDeviceAttribute *at = _attrs.first(); at != 0L; at = _attrs.next()) {
+ if (at->name == "XV_ENCODING") {
+ XvGetPortAttribute(qt_xdisplay(), xv_port, at->atom(), (int*)&enc);
+ kdDebug() << "KXvDevice: encoding: " << enc << endl;
+ encoding = enc;
+ return true;
+ }
+ }
+ return false;
+#endif
+}
+
+bool KXvDevice::setEncoding(const QString& e)
+{
+#ifdef HAVE_LIBXV
+ for (unsigned int i = 0; i < xv_encodings; i++) {
+ if (e == ((XvEncodingInfo *)xv_encoding_info)[i].name) {
+ xv_encoding = i;
+ return setAttribute("XV_ENCODING",
+ ((XvEncodingInfo *)xv_encoding_info)[i].encoding_id);
+ }
+ }
+#endif
+ return false;
+}
+
+bool KXvDevice::videoPlaying() const
+{
+ return videoStarted;
+}
+
+
+bool KXvDevice::useShm(bool on)
+{
+#ifndef HAVE_XSHM
+ if (on) {
+ return false;
+ }
+#endif
+ if (!_haveShm) {
+ return false;
+ }
+ if (_shm != on)
+ rebuildImage(xv_image_w, xv_image_h, on);
+ if (_haveShm) // This can change in rebuildImage()
+ _shm = on;
+ return _shm;
+}
+
+
+bool KXvDevice::usingShm() const
+{
+ return _shm;
+}
+
+
+#include <unistd.h>
+void KXvDevice::rebuildImage(int w, int h, bool shm)
+{
+ if (xv_image) {
+ destroyImage();
+ }
+#ifdef HAVE_LIBXV
+ if (!shm) {
+ xv_image = (void*)XvCreateImage(qt_xdisplay(), xv_port, xv_imageformat,
+ 0, w, h);
+ if (!xv_image) {
+ kdWarning() << "KXvDevice::rebuildImage: XvCreateImage failed." << endl;
+ }
+ } else {
+#ifdef HAVE_XSHM
+ memset(xv_shminfo, 0, sizeof(XShmSegmentInfo));
+ xv_image = (void*)XvShmCreateImage(qt_xdisplay(), xv_port, xv_imageformat,
+ 0, w, h, static_cast<XShmSegmentInfo*>(xv_shminfo));
+ if (!xv_image) {
+ kdWarning() << "KXvDevice::rebuildImage: Error using SHM with Xv! Disabling SHM..." << endl;
+ _haveShm = false;
+ _shm = false;
+ xv_image = (void*)XvCreateImage(qt_xdisplay(), xv_port, xv_imageformat,
+ 0, w, h);
+ if (!xv_image) {
+ kdWarning() << "KXvDevice::rebuildImage: XvCreateImage failed." << endl;
+ }
+ } else {
+ static_cast<XShmSegmentInfo*>(xv_shminfo)->shmid =
+ shmget(IPC_PRIVATE,
+ static_cast<XvImage*>(xv_image)->data_size,
+ IPC_CREAT | 0600);
+ static_cast<XShmSegmentInfo*>(xv_shminfo)->shmaddr =
+ (char*)shmat(static_cast<XShmSegmentInfo*>(xv_shminfo)->shmid, 0, 0);
+ static_cast<XShmSegmentInfo*>(xv_shminfo)->readOnly = True;
+ static_cast<XvImage*>(xv_image)->data =
+ static_cast<XShmSegmentInfo*>(xv_shminfo)->shmaddr;
+ XShmAttach(qt_xdisplay(), static_cast<XShmSegmentInfo*>(xv_shminfo));
+ XSync(qt_xdisplay(), False);
+ shmctl(static_cast<XShmSegmentInfo*>(xv_shminfo)->shmid, IPC_RMID, 0);
+ }
+#endif
+ }
+ Q_ASSERT(xv_image != 0);
+ xv_image_w = w;
+ xv_image_h = h;
+#endif
+}
+
+
+void KXvDevice::destroyImage()
+{
+#ifdef HAVE_LIBXV
+ if (!_shm) {
+ if (xv_image) {
+ static_cast<XvImage*>(xv_image)->data = 0;
+ }
+ } else {
+ if (xv_image) {
+#ifdef HAVE_XSHM
+ shmdt(static_cast<XShmSegmentInfo*>(xv_shminfo)->shmaddr);
+#endif
+ }
+ }
+ XFree(xv_image);
+ xv_image = 0;
+#endif
+}
+
+
+Atom KXvDeviceAttribute::atom()
+{
+ return XInternAtom(qt_xdisplay(), name.latin1(), False);
+}
diff --git a/kopete/libkopete/avdevice/kxv.h b/kopete/libkopete/avdevice/kxv.h
new file mode 100644
index 00000000..d386cda9
--- /dev/null
+++ b/kopete/libkopete/avdevice/kxv.h
@@ -0,0 +1,260 @@
+/*
+ * KDE Xv interface
+ *
+ * Copyright (C) 2001 George Staikos ([email protected])
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __KXV_H
+#define __KXV_H
+
+#include <X11/X.h>
+#include <X11/Xlib.h>
+#include <qstring.h>
+#include <qstringlist.h>
+#include <qptrlist.h>
+
+class QWidget;
+class QImage;
+
+class KXvPrivate;
+class KXvDevice;
+class KXvDevicePrivate;
+
+typedef QPtrList<KXvDevice> KXvDeviceList;
+
+
+class KXv
+{
+public:
+ ~KXv();
+
+ /*
+ * To get access to the Xv extension, you call this method. It will return
+ * a KXv* object on success, or NULL if it can't connect.
+ *
+ * d is typically the Window ID
+ */
+ static KXv *connect(Drawable d);
+
+ /*
+ * True if we can connect to the Xv extension.
+ */
+ static bool haveXv();
+
+ /*
+ * Return the list of Xv devices
+ */
+ KXvDeviceList& devices();
+
+protected:
+ KXv();
+ bool init(Drawable d);
+
+ /*** XV info ***/
+ unsigned int xv_version, xv_release, xv_request, xv_event, xv_error;
+ unsigned int xv_adaptors;
+ void *xv_adaptor_info;
+
+ KXvDeviceList _devs;
+
+private:
+ KXvPrivate *d;
+};
+
+
+
+class KXvDeviceAttribute
+{
+public:
+ QString name;
+ int min;
+ int max;
+ int flags;
+
+ Atom atom();
+};
+
+typedef QPtrList<KXvDeviceAttribute> KXvDeviceAttributes;
+
+
+class KXvDevice
+{
+ friend class KXv;
+public:
+
+ KXvDevice();
+ ~KXvDevice();
+
+ /*
+ * return the list of known attributes
+ */
+ const KXvDeviceAttributes& attributes();
+
+ /*
+ * return the range for a given attribute
+ */
+ bool getAttributeRange(const QString& attribute, int *min, int *max);
+
+ /*
+ * get the current value of a given attribute
+ */
+ bool getAttribute(const QString& attribute, int *val);
+
+ /*
+ * set the current value of a given attribute
+ */
+ bool setAttribute(const QString& attribute, int val);
+
+ bool grabStill(QImage *pix, int dw, int dh);
+
+ /*
+ * True if this device can operate on the given widget
+ */
+ bool supportsWidget(QWidget *w);
+
+ /*
+ * Display the given image with Xv.
+ */
+ int displayImage(QWidget *widget, const unsigned char *const data, int w, int h, int dw, int dh);
+ int displayImage(Window win, const unsigned char *const data, int w, int h, int dw, int dh);
+
+ /*
+ * Display a portion of the given image with Xv.
+ */
+ int displayImage(QWidget *widget, const unsigned char *const data, int w, int h, int x, int y, int sw, int sh, int dw, int dh);
+ int displayImage(Window win, const unsigned char *const data, int w, int h, int x, int y, int sw, int sh, int dw, int dh);
+
+ /*
+ * Start a video stream in widget w, width dw, height dh
+ */
+ bool startVideo(QWidget *w, int dw, int dh);
+ bool startVideo(Window w, int dw, int dh);
+
+ /*
+ * Is the video playing
+ */
+ bool videoPlaying() const;
+
+ /*
+ * Stop video stream
+ */
+ bool stopVideo();
+
+ /*
+ * True if this is an image output backend (video card)
+ */
+ bool isImageBackend();
+
+ /*
+ * True if this is a video source
+ */
+ bool isVideoSource();
+
+ /*
+ * Name of the device
+ */
+ const QString& name() const;
+
+ /*
+ * The Xv port for this device
+ */
+ int port() const;
+
+ /*
+ * The list of encodings/norms available
+ */
+ const QStringList& encodings() const;
+
+ /*
+ * get encoding
+ */
+ bool encoding(QString& encoding);
+
+ /*
+ * Set the encoding to the given one. This should be taken from the list.
+ */
+ bool setEncoding(const QString& e);
+
+ /*
+ * Set the image format. (ex YUV)
+ */
+ int setImageFormat(int format);
+
+ /*
+ * Get the current image format
+ */
+ int imageFormat() const;
+
+ /*
+ * Use SHM for PutImage if available
+ */
+ bool useShm(bool on);
+
+ /*
+ * Is SHM being used?
+ */
+ bool usingShm() const;
+
+
+protected:
+ bool init();
+
+ bool _shm;
+ KXvDeviceAttributes _attrs;
+
+ int xv_type, xv_adaptor;
+ QString xv_name;
+ int xv_port;
+ unsigned int xv_encodings;
+ int xv_encoding;
+ void *xv_encoding_info;
+ int xv_encoding_attributes;
+ void *xv_attr;
+ GC xv_gc;
+ Window xv_last_win;
+
+ QStringList _encodingList;
+
+ int xv_formats;
+ void *xv_formatvalues;
+
+ int xv_nvisualformats;
+ void *xv_visualformats; // XvFormat*
+
+ bool videoStarted;
+ Window videoWindow;
+
+ long xv_imageformat;
+
+ void *xv_shminfo;
+ void *xv_image;
+ int xv_image_w;
+ int xv_image_h;
+ bool _haveShm;
+
+
+private:
+ KXvDevicePrivate *d;
+
+ void rebuildImage(int w, int h, bool shm);
+ void destroyImage();
+};
+
+
+#endif
+
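The interface above is used roughly as follows. This is a hedged sketch only, assuming a QWidget named widget and a YUYV frame buffer frame of size frameW x frameH (none of these names come from the code):

    // Pick the first Xv adaptor that can render images into 'widget' and push one frame.
    KXv *xv = KXv::connect(widget->winId());
    if (xv) {
        KXvDeviceList &devices = xv->devices();
        for (KXvDevice *dev = devices.first(); dev; dev = devices.next()) {
            if (dev->isImageBackend() && dev->supportsWidget(widget)) {
                dev->useShm(true);            // falls back internally if XShm is unusable
                dev->displayImage(widget, frame, frameW, frameH,
                                  widget->width(), widget->height());
                break;
            }
        }
        delete xv;                            // the device list is owned by the KXv object
    }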
diff --git a/kopete/libkopete/avdevice/qvideo.cpp b/kopete/libkopete/avdevice/qvideo.cpp
new file mode 100644
index 00000000..ad6fb762
--- /dev/null
+++ b/kopete/libkopete/avdevice/qvideo.cpp
@@ -0,0 +1,154 @@
+/***************************************************************************
+ qvideo.cpp
+ ----------
+ begin : Sat Jun 12 2004
+ copyright : (C) 2004 by Dirk Ziegelmeier
+ (C) 2002 by George Staikos
+ ***************************************************************************/
+
+/*
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "qvideo.h"
+
+#include <kdebug.h>
+#include <qpaintdevice.h>
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+
+unsigned int QVideo::bytesppForFormat(ImageFormat fmt)
+{
+ switch (fmt) {
+ case FORMAT_RGB32:
+ case FORMAT_RGB24:
+ case FORMAT_BGR32:
+ case FORMAT_BGR24:
+ return 4;
+
+ case FORMAT_RGB15_LE:
+ case FORMAT_RGB16_LE:
+ case FORMAT_RGB15_BE:
+ case FORMAT_RGB16_BE:
+ case FORMAT_YUYV:
+ case FORMAT_UYVY:
+ case FORMAT_YUV422P:
+ case FORMAT_YUV420P:
+ return 2;
+
+ case FORMAT_GREY:
+ case FORMAT_HI240:
+ return 1;
+
+ default:
+ // unknown format
+ return 0;
+ }
+}
+
+bool QVideo::findDisplayProperties(ImageFormat& fmt, int& depth, unsigned int& bitsperpixel, int& bytesperpixel)
+{
+ XVisualInfo *vi_in, vi_out;
+ long mask = VisualScreenMask;
+ int nvis = 0;
+
+ ImageFormat p = FORMAT_NONE;
+ int bpp = 0;
+ int d = 0;
+
+ vi_out.screen = QPaintDevice::x11AppScreen();
+ vi_in = XGetVisualInfo(qt_xdisplay(), mask, &vi_out, &nvis);
+
+ if (vi_in) {
+ for (int i = 0; i < nvis; i++) {
+ bpp = 0;
+ int n;
+ XPixmapFormatValues *pf = XListPixmapFormats(qt_xdisplay(),&n);
+ d = vi_in[i].depth;
+ for (int j = 0; j < n; j++) {
+ if (pf[j].depth == d) {
+ bpp = pf[j].bits_per_pixel;
+ break;
+ }
+ }
+ XFree(pf);
+
+ // FIXME: Endianness detection
+
+ p = FORMAT_NONE;
+ switch (bpp) {
+ case 32:
+ if (vi_in[i].red_mask == 0xff0000 &&
+ vi_in[i].green_mask == 0x00ff00 &&
+ vi_in[i].blue_mask == 0x0000ff) {
+ p = FORMAT_BGR32;
+ kdDebug() << "QVideo: Found BGR32 display." << endl;
+ }
+ break;
+ case 24:
+ if (vi_in[i].red_mask == 0xff0000 &&
+ vi_in[i].green_mask == 0x00ff00 &&
+ vi_in[i].blue_mask == 0x0000ff) {
+ p = FORMAT_BGR24;
+ kdDebug() << "QVideo: Found BGR24 display." << endl;
+ }
+ break;
+ case 16:
+ if (vi_in[i].red_mask == 0x00f800 &&
+ vi_in[i].green_mask == 0x0007e0 &&
+ vi_in[i].blue_mask == 0x00001f) {
+ p = FORMAT_RGB16_LE;
+ kdDebug() << "QVideo: Found RGB16_LE display." << endl;
+ } else
+ if (vi_in[i].red_mask == 0x007c00 &&
+ vi_in[i].green_mask == 0x0003e0 &&
+ vi_in[i].blue_mask == 0x00001f) {
+ p = FORMAT_RGB15_LE;
+ kdDebug() << "QVideo: Found RGB15_LE display." << endl;
+ }
+ break;
+ case 8:
+ default:
+ continue;
+ }
+
+ if (p != FORMAT_NONE)
+ break;
+ }
+ XFree(vi_in);
+ }
+
+ if (p != FORMAT_NONE) {
+ int bytespp = bytesppForFormat(p);
+ kdDebug() << "QVideo: Display properties: depth: " << d
+ << ", bits/pixel: " << bpp
+ << ", bytes/pixel: " << bytespp << endl;
+ fmt = p;
+ bitsperpixel = bpp;
+ bytesperpixel = bytespp;
+ depth = d;
+ return true;
+ } else {
+ kdWarning() << "QVideo: Unable to find out palette. What display do you have????" << endl;
+ fmt = FORMAT_NONE;
+ bitsperpixel = 0;
+ bytesperpixel = 0;
+ depth = 0;
+ return false;
+ }
+}
diff --git a/kopete/libkopete/avdevice/qvideo.h b/kopete/libkopete/avdevice/qvideo.h
new file mode 100644
index 00000000..20e999fc
--- /dev/null
+++ b/kopete/libkopete/avdevice/qvideo.h
@@ -0,0 +1,68 @@
+// -*- c++ -*-
+/***************************************************************************
+ qvideo.h
+ --------
+ begin : Sat Jun 12 2004
+ copyright : (C) 2004 by Dirk Ziegelmeier
+ ***************************************************************************/
+
+/*
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef QVIDEO_H
+#define QVIDEO_H
+
+class QVideo
+{
+public:
+ typedef enum {
+ FORMAT_NONE = 0,
+ FORMAT_GREY = (1<<0),
+ FORMAT_HI240 = (1<<1),
+ FORMAT_RGB15_LE = (1<<2),
+ FORMAT_RGB15_BE = (1<<3),
+ FORMAT_RGB16_LE = (1<<4),
+ FORMAT_RGB16_BE = (1<<5),
+ FORMAT_RGB32 = (1<<6),
+ FORMAT_BGR32 = (1<<7),
+ FORMAT_RGB24 = (1<<8),
+ FORMAT_BGR24 = (1<<9),
+ FORMAT_YUYV = (1<<10),
+ FORMAT_UYVY = (1<<11),
+ FORMAT_YUV422P = (1<<12),
+ FORMAT_YUV420P = (1<<13),
+ FORMAT_ALL = 0x00003FFF
+ } ImageFormat;
+
+ typedef enum {
+ METHOD_NONE = 0,
+ METHOD_XSHM = 1,
+ METHOD_XV = 2,
+ METHOD_XVSHM = 4,
+ METHOD_X11 = 8,
+ METHOD_DGA = 16, /* unimplemented */
+ METHOD_GL = 32,
+ METHOD_SDL = 64 /* unimplemented */
+ } VideoMethod;
+
+ static unsigned int bytesppForFormat(ImageFormat fmt);
+ static bool findDisplayProperties(ImageFormat& fmt, int& depth, unsigned int& bitsperpixel, int& bytesperpixel);
+};
+
+#endif //QVIDEO_H
+
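A short sketch of how the two static helpers are meant to be combined (variable names are illustrative):

    // Query the X display once, before deciding how captured frames must be converted.
    QVideo::ImageFormat fmt;
    int depth, bytesPerPixel;
    unsigned int bitsPerPixel;
    if (QVideo::findDisplayProperties(fmt, depth, bitsPerPixel, bytesPerPixel)) {
        // On a common 24/32-bit X visual this reports FORMAT_BGR32 with 4 bytes per pixel.
        const unsigned int bytesPP = QVideo::bytesppForFormat(fmt);  // matches bytesPerPixel
    }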
diff --git a/kopete/libkopete/avdevice/qvideostream.cpp b/kopete/libkopete/avdevice/qvideostream.cpp
new file mode 100644
index 00000000..cd7aafc2
--- /dev/null
+++ b/kopete/libkopete/avdevice/qvideostream.cpp
@@ -0,0 +1,731 @@
+/*
+ *
+ * Copyright (C) 2002 George Staikos <[email protected]>
+ * 2004 Dirk Ziegelmeier <[email protected]>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "qvideostream.h"
+#include <qevent.h>
+#include <qimage.h>
+#include <qtimer.h>
+
+#include <kdebug.h>
+#include "kxv.h"
+
+#include <sys/types.h>
+#include <X11/Xutil.h>
+
+#ifdef HAVE_XSHM
+extern "C" {
+#include <sys/shm.h>
+#include <X11/X.h>
+#include <X11/Xlib.h>
+#include <X11/extensions/XShm.h>
+}
+#endif
+
+#ifdef HAVE_GL
+class QVideoStreamGLWidget : public QGLWidget
+{
+public:
+ QVideoStreamGLWidget(QWidget* parent = 0, const char* name = 0);
+ virtual ~QVideoStreamGLWidget();
+
+ void setInputSize(const QSize& sz);
+ void display(const unsigned char *const img, int x, int y, int sw, int sh);
+
+private:
+ virtual void resizeGL(int w, int h);
+ void initializeGL();
+
+ virtual bool eventFilter( QObject *o, QEvent *e );
+ void calc(QPoint& p, QPoint& v);
+
+
+ QSize _inputSize;
+ GLuint _tex;
+ int _tw, _th;
+ QWidget* _w;
+ int _maxGL;
+ QSize _sz;
+ bool _glfun;
+ QPoint _ul, _ur, _ll, _lr;
+ QPoint _vul, _vur, _vll, _vlr;
+ QTimer* _glfunTimer;
+};
+#endif
+
+class QVideoStreamPrivate
+{
+public:
+ QVideoStreamPrivate();
+ ~QVideoStreamPrivate();
+ KXv *xvHandle;
+ KXvDevice *xvdev;
+ XImage *xim;
+ GC gc;
+#ifdef HAVE_GL
+ QVideoStreamGLWidget* glwidget;
+#endif
+#ifdef HAVE_XSHM
+ XShmSegmentInfo shmh;
+#endif
+};
+
+QVideoStreamPrivate::QVideoStreamPrivate()
+{
+ xvHandle = 0;
+ xim = 0;
+}
+
+QVideoStreamPrivate::~QVideoStreamPrivate()
+{
+ delete xvHandle;
+}
+
+QVideoStream::QVideoStream(QWidget *widget, const char* name)
+ : QObject(widget, name),
+ d(new QVideoStreamPrivate),
+ _w(widget),
+ _methods(METHOD_NONE),
+ _method(METHOD_NONE),
+ _format(FORMAT_NONE),
+ _init(false)
+{
+ int dummy;
+ unsigned int dummy2;
+ findDisplayProperties(_xFormat, dummy, dummy2, dummy);
+
+ _methods = (VideoMethod)(_methods | METHOD_X11);
+
+#ifdef HAVE_XSHM
+ if (XShmQueryExtension(_w->x11Display())) {
+ _methods = (VideoMethod)(_methods | METHOD_XSHM);
+ }
+#endif
+
+ if (KXv::haveXv()) {
+ _methods = (VideoMethod)(_methods | METHOD_XV);
+#ifdef HAVE_XSHM
+ _methods = (VideoMethod)(_methods | METHOD_XVSHM);
+#endif
+ }
+
+#ifdef HAVE_GL
+ if (QGLFormat::hasOpenGL()) {
+ _methods = (VideoMethod)(_methods | METHOD_GL);
+ }
+#endif
+
+ d->gc = XCreateGC(_w->x11Display(), _w->winId(), 0, NULL);
+}
+
+QVideoStream::~QVideoStream()
+{
+ deInit();
+ XFreeGC(_w->x11Display(), d->gc);
+ delete d;
+}
+
+void QVideoStream::deInit()
+{
+ if (!_init)
+ return;
+
+ _init = false;
+ _format = FORMAT_NONE;
+
+ Q_ASSERT(_methods & _method);
+ if (!(_methods & _method))
+ return;
+
+ switch (_method) {
+ case METHOD_XSHM:
+#ifdef HAVE_XSHM
+ XShmDetach(_w->x11Display(), &(d->shmh));
+ XDestroyImage(d->xim);
+ d->xim = 0;
+ shmdt(d->shmh.shmaddr);
+#endif
+ break;
+ case METHOD_X11:
+ delete[] d->xim->data;
+ d->xim->data = 0;
+ XDestroyImage(d->xim);
+ d->xim = 0;
+ break;
+ case METHOD_XVSHM:
+ case METHOD_XV:
+ delete d->xvHandle;
+ d->xvHandle = 0;
+ break;
+ case METHOD_GL:
+#ifdef HAVE_GL
+ delete d->glwidget;
+#endif
+ break;
+ default:
+ Q_ASSERT(0);
+ return;
+ }
+}
+
+void QVideoStream::init()
+{
+ Q_ASSERT(_methods & _method);
+ if (!(_methods & _method))
+ return;
+
+ switch (_method) {
+ case METHOD_XSHM:
+ {
+#ifdef HAVE_XSHM
+ if ( !_inputSize.isValid() ) {
+ kdWarning() << "QVideoStream::init() (XSHM): Unable to initialize due to invalid input size." << endl;
+ return;
+ }
+
+ memset(&(d->shmh), 0, sizeof(XShmSegmentInfo));
+ d->xim = XShmCreateImage(_w->x11Display(),
+ (Visual*)_w->x11Visual(),
+ _w->x11Depth(),
+ ZPixmap, 0, &(d->shmh),
+ _inputSize.width(),
+ _inputSize.height());
+ d->shmh.shmid = shmget(IPC_PRIVATE,
+ d->xim->bytes_per_line*d->xim->height,
+ IPC_CREAT|0600);
+ d->shmh.shmaddr = (char *)shmat(d->shmh.shmid, 0, 0);
+ d->xim->data = (char*)d->shmh.shmaddr;
+ d->shmh.readOnly = False;
+ Status s = XShmAttach(_w->x11Display(), &(d->shmh));
+ if (s) {
+ XSync(_w->x11Display(), False);
+ shmctl(d->shmh.shmid, IPC_RMID, 0);
+ _format = _xFormat;
+ _init = true;
+ } else {
+ kdWarning() << "XShmAttach failed!" << endl;
+ XDestroyImage(d->xim);
+ d->xim = 0;
+ shmdt(d->shmh.shmaddr);
+ }
+#endif
+ }
+ break;
+ case METHOD_X11:
+ if ( !_inputSize.isValid() ) {
+ kdWarning() << "QVideoStream::init() (X11): Unable to initialize due to invalid input size." << endl;
+ return;
+ }
+
+ d->xim = XCreateImage(_w->x11Display(),
+ (Visual*)_w->x11Visual(),
+ _w->x11Depth(),
+ ZPixmap, 0, 0,
+ _inputSize.width(),
+ _inputSize.height(),
+ 32, 0);
+
+ d->xim->data = new char[d->xim->bytes_per_line*_inputSize.height()];
+ _format = _xFormat;
+ _init = true;
+ break;
+ case METHOD_XVSHM:
+ case METHOD_XV:
+ {
+ if (d->xvHandle)
+ delete d->xvHandle;
+
+ d->xvHandle = KXv::connect(_w->winId());
+ KXvDeviceList& xvdl(d->xvHandle->devices());
+ KXvDevice *xvdev = NULL;
+
+ for (xvdev = xvdl.first(); xvdev; xvdev = xvdl.next()) {
+ if (xvdev->isImageBackend() &&
+ xvdev->supportsWidget(_w)) {
+ d->xvdev = xvdev;
+ d->xvdev->useShm(_method == METHOD_XVSHM);
+ _format = FORMAT_YUYV;
+ _init = true;
+ break;
+ }
+ }
+
+ if (!_init) {
+ delete d->xvHandle;
+ d->xvHandle = 0;
+ }
+ }
+ break;
+ case METHOD_GL:
+#ifdef HAVE_GL
+ d->glwidget = new QVideoStreamGLWidget(_w, "QVideoStreamGLWidget");
+ d->glwidget->resize(_w->width(), _w->height());
+ d->glwidget->show();
+ _format = FORMAT_BGR24;
+ _init = true;
+#endif
+ break;
+ default:
+ Q_ASSERT(0);
+ return;
+ }
+}
+
+bool QVideoStream::haveMethod(VideoMethod method) const
+{
+ return _methods & method;
+}
+
+QVideo::VideoMethod QVideoStream::method() const
+{
+ return _method;
+}
+
+QVideo::VideoMethod QVideoStream::setMethod(VideoMethod method)
+{
+ if (_methods & method) {
+ deInit();
+ _method = method;
+ init();
+ }
+
+ return _method;
+}
+
+QSize QVideoStream::maxSize() const
+{
+ return _size;
+}
+
+int QVideoStream::maxWidth() const
+{
+ return _size.width();
+}
+
+int QVideoStream::maxHeight() const
+{
+ return _size.height();
+}
+
+QSize QVideoStream::size() const
+{
+ return _size;
+}
+
+int QVideoStream::width() const
+{
+ return _size.width();
+}
+
+int QVideoStream::height() const
+{
+ return _size.height();
+}
+
+QSize QVideoStream::setSize(const QSize& sz)
+{
+ _size = sz;
+ return _size;
+}
+
+int QVideoStream::setWidth(int width)
+{
+ if (width < 0)
+ width = 0;
+ if (width > maxWidth())
+ width = maxWidth();
+ _size.setWidth(width);
+ return _size.width();
+}
+
+int QVideoStream::setHeight(int height)
+{
+ if (height < 0)
+ height = 0;
+ if (height > maxHeight())
+ height = maxHeight();
+ _size.setHeight(height);
+ return _size.height();
+}
+
+QSize QVideoStream::inputSize() const
+{
+ return _inputSize;
+}
+
+int QVideoStream::inputWidth() const
+{
+ return _inputSize.width();
+}
+
+int QVideoStream::inputHeight() const
+{
+ return _inputSize.height();
+}
+
+QSize QVideoStream::setInputSize(const QSize& sz)
+{
+ if (sz == _inputSize)
+ return _inputSize;
+ _inputSize = sz;
+ if (_method & (METHOD_XSHM | METHOD_X11)) {
+ deInit();
+ init();
+ }
+#ifdef HAVE_GL
+ if (_method & METHOD_GL) {
+ d->glwidget->setInputSize(_inputSize);
+ }
+#endif
+ return _inputSize;
+}
+
+int QVideoStream::setInputWidth(int width)
+{
+ if (width == _inputSize.width())
+ return _inputSize.width();
+ _inputSize.setWidth(width);
+ if (_method & (METHOD_XSHM | METHOD_X11)) {
+ deInit();
+ init();
+ }
+#ifdef HAVE_GL
+ if (_method & METHOD_GL) {
+ d->glwidget->setInputSize(_inputSize);
+ }
+#endif
+ return _inputSize.width();
+}
+
+int QVideoStream::setInputHeight(int height)
+{
+ if (height == _inputSize.height())
+ return _inputSize.height();
+ _inputSize.setHeight(height);
+ if (_method & (METHOD_XSHM | METHOD_X11)) {
+ deInit();
+ init();
+ }
+#ifdef HAVE_GL
+ if (_method & METHOD_GL) {
+ d->glwidget->setInputSize(_inputSize);
+ }
+#endif
+ return _inputSize.height();
+}
+
+bool QVideoStream::supportsFormat(VideoMethod method, ImageFormat format)
+{
+ return (bool)(formatsForMethod(method) & format);
+}
+
+QVideo::ImageFormat QVideoStream::formatsForMethod(VideoMethod method)
+{
+ switch(method) {
+ case METHOD_XSHM:
+ case METHOD_X11:
+ return _xFormat;
+ case METHOD_XV:
+ case METHOD_XVSHM:
+ return FORMAT_YUYV;
+ case METHOD_GL:
+ return FORMAT_BGR24;
+ default:
+ return FORMAT_NONE;
+ }
+}
+
+QVideo::ImageFormat QVideoStream::format() const
+{
+ return _format;
+}
+
+bool QVideoStream::setFormat(ImageFormat format)
+{
+ if(supportsFormat(_method, format)) {
+ _format = format;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+int QVideoStream::displayFrame(const unsigned char *const img)
+{
+ return displayFrame(img, 0, 0, _inputSize.width(), _inputSize.height());
+}
+
+int QVideoStream::displayFrame(const unsigned char *const img, int x, int y, int sw, int sh)
+{
+ Q_ASSERT(_init);
+ if (!_init)
+ return -1;
+
+ Q_ASSERT(_methods & _method);
+ if (!(_methods & _method))
+ return -1;
+
+ switch (_method) {
+ case METHOD_XV:
+ case METHOD_XVSHM:
+ return d->xvdev->displayImage(_w, img,
+ _inputSize.width(), _inputSize.height(), x, y, sw, sh,
+ _size.width(), _size.height());
+ break;
+ case METHOD_XSHM:
+#ifdef HAVE_XSHM
+ memcpy(d->xim->data,img,d->xim->bytes_per_line*d->xim->height);
+ XShmPutImage(_w->x11Display(), _w->winId(), d->gc, d->xim,
+ x, y,
+ 0, 0,
+ sw, sh,
+ 0);
+ XSync(_w->x11Display(), False);
+ break;
+#else
+ return -1;
+#endif
+ case METHOD_X11:
+ memcpy(d->xim->data, img, d->xim->bytes_per_line*d->xim->height);
+ XPutImage(_w->x11Display(), _w->winId(), d->gc, d->xim,
+ x, y,
+ 0, 0,
+ sw, sh);
+ XSync(_w->x11Display(), False);
+ break;
+ case METHOD_GL:
+#ifdef HAVE_GL
+ d->glwidget->display(img, x, y, sw, sh);
+#endif
+ break;
+ default:
+ Q_ASSERT(0);
+ return -1;
+ }
+
+ return 0;
+}
+
+QVideoStream& QVideoStream::operator<<(const unsigned char *const img)
+{
+ displayFrame(img);
+ return *this;
+}
+
+// ---------------------------------------------------------------------------------------
+#ifdef HAVE_GL
+
+QVideoStreamGLWidget::QVideoStreamGLWidget(QWidget* parent, const char* name)
+ : QGLWidget(QGLFormat(QGL::DoubleBuffer | QGL::Rgba | QGL::DirectRendering), parent, name),
+ _tex(0),
+ _w(parent),
+ _glfun(false)
+{
+ kdDebug() << "QVideoStreamGLWidget::QVideoStreamGLWidget()" << endl;
+
+ connect(_w, SIGNAL(resized(int, int)),
+ this, SLOT(resize(int, int)));
+
+ topLevelWidget()->installEventFilter(this);
+ _glfunTimer = new QTimer();
+}
+
+QVideoStreamGLWidget::~QVideoStreamGLWidget()
+{
+ kdDebug() << "QVideoStreamGLWidget::~QVideoStreamGLWidget()" << endl;
+ delete _glfunTimer;
+
+ makeCurrent();
+ if(_tex != 0) {
+ glDeleteTextures(1, &_tex);
+ }
+}
+
+bool QVideoStreamGLWidget::eventFilter(QObject*, QEvent* e)
+{
+ // The X11 headers #define KeyPress to 2, which shadows QEvent::KeyPress (value 6),
+ // so the raw event type value 6 is compared here instead of using the enum name.
+ if(e->type() == 6) {
+ QKeyEvent* ke = static_cast<QKeyEvent*>(e);
+ if(ke->key() == Qt::Key_Pause) {
+ _glfunTimer->start(500, true);
+ } else if (_glfunTimer->isActive() && (ke->key() == Qt::Key_Escape)) {
+ _glfun = !_glfun;
+ }
+ }
+ return false;
+}
+
+void QVideoStreamGLWidget::initializeGL()
+{
+ kdDebug() << "QVideoStreamGLWidget::initializeGL()" << endl;
+ setAutoBufferSwap(false);
+
+ QGLFormat f = format();
+ glGetIntegerv(GL_MAX_TEXTURE_SIZE, &_maxGL);
+ kdDebug() << "OpenGL capabilities (* = required):" << endl;
+ kdDebug() << " Valid context*: " << isValid() << endl;
+ kdDebug() << " DoubleBuffer*: " << f.doubleBuffer() << endl;
+ kdDebug() << " Depth: " << f.depth() << endl;
+ kdDebug() << " RGBA*: " << f.rgba() << endl;
+ kdDebug() << " Alpha: " << f.alpha() << endl;
+ kdDebug() << " Accum: " << f.accum() << endl;
+ kdDebug() << " Stencil: " << f.stencil() << endl;
+ kdDebug() << " Stereo: " << f.stereo() << endl;
+ kdDebug() << " DirectRendering*: " << f.directRendering() << endl;
+ kdDebug() << " Overlay: " << f.hasOverlay() << endl;
+ kdDebug() << " Plane: " << f.plane() << endl;
+ kdDebug() << " MAX_TEXTURE_SIZE: " << _maxGL << endl;
+
+ qglClearColor(Qt::black);
+ glShadeModel(GL_FLAT);
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+ _vul = QPoint( 4, 10);
+ _vur = QPoint(-8, 4);
+ _vll = QPoint(10, -4);
+ _vlr = QPoint(-8, -10);
+}
+
+void QVideoStreamGLWidget::resizeGL(int w, int h)
+{
+ _sz = QSize(w, h);
+
+ glViewport(0, 0, w, h);
+ glMatrixMode(GL_PROJECTION);
+ glLoadIdentity();
+ glOrtho(0.0, w, 0.0, h, -1, 1);
+ glMatrixMode(GL_MODELVIEW);
+ glLoadIdentity();
+
+ _ul = QPoint(0, 0);
+ _ur = QPoint(w, 0);
+ _ll = QPoint(0, h);
+ _lr = QPoint(w, h);
+}
+
+void QVideoStreamGLWidget::setInputSize(const QSize& sz)
+{
+ makeCurrent();
+
+ _inputSize = sz;
+ int iw = _inputSize.width();
+ int ih = _inputSize.height();
+
+ if ( (iw > _maxGL) || (ih > _maxGL) ) {
+ kdWarning() << "QVideoStreamGLWidget::setInputSize(): Texture too large! maxGL: " << _maxGL << endl;
+ return;
+ }
+
+ // textures have power-of-two x,y dimensions
+ int i;
+ for (i = 0; iw >= (1 << i); i++)
+ ;
+ _tw = (1 << i);
+ for (i = 0; ih >= (1 << i); i++)
+ ;
+ _th = (1 << i);
+
+ // Generate texture
+ if(_tex != 0) {
+ glDeleteTextures(1, &_tex);
+ }
+ glGenTextures(1, &_tex);
+ glBindTexture(GL_TEXTURE_2D, _tex);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+
+ // Blank texture
+ char* dummy = new char[_tw*_th*4];
+ memset(dummy, 128, _tw*_th*4);
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _tw, _th, 0,
+ GL_RGB, GL_UNSIGNED_BYTE, dummy);
+ delete[] dummy;
+}
+
+void QVideoStreamGLWidget::display(const unsigned char *const img, int x, int y, int sw, int sh)
+{
+ makeCurrent();
+
+ // FIXME: Endianness - also support GL_RGB
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _inputSize.width(), _inputSize.height(),
+ GL_BGR, GL_UNSIGNED_BYTE, img);
+
+ // upper right coords
+ float ur_x = (float)(x + sw) / _tw;
+ float ur_y = (float)(y + sh) / _th;
+
+ // lower left coords
+ float ll_x = (float)(x) / _tw;
+ float ll_y = (float)(y) / _th;
+
+ glEnable(GL_TEXTURE_2D);
+ glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
+ glBindTexture(GL_TEXTURE_2D, _tex);
+ if (!_glfun) {
+ glBegin(GL_QUADS);
+ glTexCoord2f(ll_x, ur_y); glVertex2i(0, 0 );
+ glTexCoord2f(ll_x, ll_y); glVertex2i(0, _sz.height());
+ glTexCoord2f(ur_x, ll_y); glVertex2i(_sz.width(), _sz.height());
+ glTexCoord2f(ur_x, ur_y); glVertex2i(_sz.width(), 0 );
+ glEnd();
+ } else {
+ calc(_ul, _vul);
+ calc(_ur, _vur);
+ calc(_ll, _vll);
+ calc(_lr, _vlr);
+
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ glBegin(GL_QUADS);
+ glTexCoord2f(0, y); glVertex2i(_ul.x(), _ul.y());
+ glTexCoord2f(0, 0); glVertex2i(_ll.x(), _ll.y());
+ glTexCoord2f(x, 0); glVertex2i(_lr.x(), _lr.y());
+ glTexCoord2f(x, y); glVertex2i(_ur.x(), _ur.y());
+ glEnd();
+ }
+ swapBuffers();
+ glDisable(GL_TEXTURE_2D);
+}
+
+void QVideoStreamGLWidget::calc(QPoint& p, QPoint& v)
+{
+ p += v;
+
+ if(p.x() < 0) {
+ p.setX(-p.x());
+ v.setX(-v.x());
+ }
+ if(p.y() < 0) {
+ p.setY(-p.y());
+ v.setY(-v.y());
+ }
+ if(p.x() > _sz.width()) {
+ p.setX(_sz.width() - (p.x() - _sz.width()));
+ v.setX(-v.x());
+ }
+ if(p.y() > _sz.height()) {
+ p.setY(_sz.height() - (p.y() - _sz.height()));
+ v.setY(-v.y());
+ }
+}
+#endif
+
+#include "qvideostream.moc"
diff --git a/kopete/libkopete/avdevice/qvideostream.h b/kopete/libkopete/avdevice/qvideostream.h
new file mode 100644
index 00000000..801fa829
--- /dev/null
+++ b/kopete/libkopete/avdevice/qvideostream.h
@@ -0,0 +1,112 @@
+// -*- c++ -*-
+
+/*
+ *
+ * Copyright (C) 2002 George Staikos <[email protected]>
+ * 2004 Dirk Ziegelmeier <[email protected]>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _QVIDEOSTREAM_H
+#define _QVIDEOSTREAM_H
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#ifdef HAVE_GL
+#include <qgl.h>
+#include <GL/gl.h>
+#endif
+
+#include <qwidget.h>
+#include "qvideo.h"
+
+class QVideoStreamPrivate;
+
+/**
+ * QT-style video stream driver.
+ */
+class QVideoStream : public QObject, public QVideo
+{
+ Q_OBJECT
+
+public:
+ QVideoStream(QWidget *widget, const char* name = 0);
+ ~QVideoStream();
+
+ /* output method */
+ bool haveMethod(VideoMethod method) const;
+ VideoMethod method() const;
+ VideoMethod setMethod(VideoMethod method);
+
+ /* max output sizes */
+ QSize maxSize() const;
+ int maxWidth() const;
+ int maxHeight() const;
+
+ /* output sizes */
+ QSize size() const;
+ int width() const;
+ int height() const;
+
+ QSize setSize(const QSize& sz);
+ int setWidth(int width);
+ int setHeight(int height);
+
+ /* input sizes */
+ QSize inputSize() const;
+ int inputWidth() const;
+ int inputHeight() const;
+
+ QSize setInputSize(const QSize& sz);
+ int setInputWidth(int width);
+ int setInputHeight(int height);
+
+ /* input format */
+ ImageFormat format() const;
+ bool setFormat(ImageFormat format);
+
+ /* functions to find out about formats */
+ ImageFormat formatsForMethod(VideoMethod method);
+ bool supportsFormat(VideoMethod method, ImageFormat format);
+
+ /* Display image */
+ QVideoStream& operator<<(const unsigned char *const img);
+
+public slots:
+ int displayFrame(const unsigned char *const img);
+ int displayFrame(const unsigned char *const img, int x, int y, int sw, int sh);
+
+private:
+ QVideoStreamPrivate* d;
+
+ QWidget* _w;
+ VideoMethod _methods; // list of methods
+ VideoMethod _method; // the current method
+ ImageFormat _format;
+ QSize _size;
+ QSize _inputSize;
+ bool _init;
+ ImageFormat _xFormat;
+
+ void deInit();
+ void init();
+};
+
+#endif
+
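Taken together, the interface suggests the call sequence sketched below; this assumes a parent QWidget and an already-converted frame buffer, and neither name appears in the code:

    QVideoStream *stream = new QVideoStream(parent, "videostream");
    // Prefer Xv scaling when available, otherwise fall back to plain X11 output.
    stream->setMethod(stream->haveMethod(QVideo::METHOD_XV) ? QVideo::METHOD_XV
                                                            : QVideo::METHOD_X11);
    stream->setInputSize(QSize(320, 240));                      // size of the frames pushed in
    stream->setSize(QSize(parent->width(), parent->height()));  // size drawn on screen
    stream->displayFrame(frame);                                // equivalently: *stream << frame;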
diff --git a/kopete/libkopete/avdevice/sonix_compress.cpp b/kopete/libkopete/avdevice/sonix_compress.cpp
new file mode 100644
index 00000000..400635c4
--- /dev/null
+++ b/kopete/libkopete/avdevice/sonix_compress.cpp
@@ -0,0 +1,180 @@
+#include "sonix_compress.h"
+
+#define CLAMP(x) ((x)<0?0:((x)>255)?255:(x))
+
+typedef struct {
+ int is_abs;
+ int len;
+ int val;
+ int unk;
+} code_table_t;
+
+
+/* local storage */
+static code_table_t table[256];
+static int init_done = 0;
+
+/* global variable */
+int sonix_unknown = 0;
+
+/*
+ sonix_decompress_init
+ =====================
+ pre-calculates a locally stored table for efficient Huffman decoding.
+
+ Each entry at index x in the table represents the codeword
+ present at the MSB of byte x.
+
+*/
+void sonix_decompress_init(void)
+{
+ int i;
+ int is_abs, val, len, unk;
+
+ for (i = 0; i < 256; i++) {
+ is_abs = 0;
+ val = 0;
+ len = 0;
+ unk = 0;
+ if ((i & 0x80) == 0) {
+ /* code 0 */
+ val = 0;
+ len = 1;
+ }
+ else if ((i & 0xE0) == 0x80) {
+ /* code 100 */
+ val = +4;
+ len = 3;
+ }
+ else if ((i & 0xE0) == 0xA0) {
+ /* code 101 */
+ val = -4;
+ len = 3;
+ }
+ else if ((i & 0xF0) == 0xD0) {
+ /* code 1101 */
+ val = +11;
+ len = 4;
+ }
+ else if ((i & 0xF0) == 0xF0) {
+ /* code 1111 */
+ val = -11;
+ len = 4;
+ }
+ else if ((i & 0xF8) == 0xC8) {
+ /* code 11001 */
+ val = +20;
+ len = 5;
+ }
+ else if ((i & 0xFC) == 0xC0) {
+ /* code 110000 */
+ val = -20;
+ len = 6;
+ }
+ else if ((i & 0xFC) == 0xC4) {
+ /* code 110001xx: unknown */
+ val = 0;
+ len = 8;
+ unk = 1;
+ }
+ else if ((i & 0xF0) == 0xE0) {
+ /* code 1110xxxx */
+ is_abs = 1;
+ val = (i & 0x0F) << 4;
+ len = 8;
+ }
+ table[i].is_abs = is_abs;
+ table[i].val = val;
+ table[i].len = len;
+ table[i].unk = unk;
+ }
+
+ sonix_unknown = 0;
+ init_done = 1;
+}
+
+
+/*
+ sonix_decompress
+ ================
+ decompresses an image encoded by a SN9C101 camera controller chip.
+
+ IN width
+ height
+ inp pointer to compressed frame (with header already stripped)
+ OUT outp pointer to decompressed frame
+
+ Returns 0 if the operation was successful.
+ Returns <0 if operation failed.
+
+*/
+int sonix_decompress(int width, int height, unsigned char *inp, unsigned char *outp)
+{
+ int row, col;
+ int val;
+ int bitpos;
+ unsigned char code;
+ unsigned char *addr;
+
+ if (!init_done) {
+ /* do sonix_decompress_init first! */
+ return -1;
+ }
+
+ bitpos = 0;
+ for (row = 0; row < height; row++) {
+
+ col = 0;
+
+ /* first two pixels in first two rows are stored as raw 8-bit */
+ if (row < 2) {
+ addr = inp + (bitpos >> 3);
+ code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
+ bitpos += 8;
+ *outp++ = code;
+
+ addr = inp + (bitpos >> 3);
+ code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
+ bitpos += 8;
+ *outp++ = code;
+
+ col += 2;
+ }
+
+ while (col < width) {
+ /* get bitcode from bitstream */
+ addr = inp + (bitpos >> 3);
+ code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
+
+ /* update bit position */
+ bitpos += table[code].len;
+
+ /* update code statistics */
+ sonix_unknown += table[code].unk;
+
+ /* calculate pixel value */
+ val = table[code].val;
+ if (!table[code].is_abs) {
+ /* value is relative to top and left pixel */
+ if (col < 2) {
+ /* left column: relative to top pixel */
+ val += outp[-2*width];
+ }
+ else if (row < 2) {
+ /* top row: relative to left pixel */
+ val += outp[-2];
+ }
+ else {
+ /* main area: average of left pixel and top pixel */
+ val += (outp[-2] + outp[-2*width]) / 2;
+ }
+ }
+
+ /* store pixel */
+ *outp++ = CLAMP(val);
+ col++;
+ }
+ }
+
+ return 0;
+}
diff --git a/kopete/libkopete/avdevice/sonix_compress.h b/kopete/libkopete/avdevice/sonix_compress.h
new file mode 100644
index 00000000..509bcb07
--- /dev/null
+++ b/kopete/libkopete/avdevice/sonix_compress.h
@@ -0,0 +1,8 @@
+// Call this function first (just once is needed), before calling sonix_decompress
+void sonix_decompress_init(void);
+
+// decompresses data at inp until a full image of width x height pixels has been written to outp
+int sonix_decompress(int width, int height, unsigned char *inp, unsigned char *outp);
+
+// counter to detect presence of currently unknown huffman codes
+extern int sonix_unknown;
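For reference, a minimal sketch of how these routines are chained elsewhere in this directory (assumptions: the SN9C10x frame header has already been stripped, the caller provides the intermediate Bayer buffer, and bayer2rgb24() from bayer.h is used for demosaicing as in videodevice.cpp; the helper name is illustrative only):

	#include "sonix_compress.h"
	#include "bayer.h"

	// bayer must hold width*height bytes, rgb must hold width*height*3 bytes.
	int decodeSonixFrame(unsigned char *compressed, unsigned char *bayer,
	                     unsigned char *rgb, int width, int height)
	{
		sonix_decompress_init();	// fills the static code table; needed once
		if (sonix_decompress(width, height, compressed, bayer) < 0)
			return -1;		// the routine reports failure with a negative value
		bayer2rgb24(rgb, bayer, width, height);	// demosaic the Bayer pattern to packed RGB24
		return 0;
	}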
diff --git a/kopete/libkopete/avdevice/videocontrol.cpp b/kopete/libkopete/avdevice/videocontrol.cpp
new file mode 100644
index 00000000..f4807c1c
--- /dev/null
+++ b/kopete/libkopete/avdevice/videocontrol.cpp
@@ -0,0 +1,36 @@
+/*
+    videocontrol.cpp - Kopete Video Control Class
+
+ Copyright (c) 2007 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#include "videocontrol.h"
+
+namespace Kopete {
+
+namespace AV {
+
+VideoControl::VideoControl()
+{
+}
+
+
+VideoControl::~VideoControl()
+{
+}
+
+
+}
+
+}
diff --git a/kopete/libkopete/avdevice/videocontrol.h b/kopete/libkopete/avdevice/videocontrol.h
new file mode 100644
index 00000000..2675be6e
--- /dev/null
+++ b/kopete/libkopete/avdevice/videocontrol.h
@@ -0,0 +1,82 @@
+/*
+    videocontrol.h - Kopete Video Control Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#define ENABLE_AV
+
+#ifndef KOPETE_AVVIDEOCONTROL_H
+#define KOPETE_AVVIDEOCONTROL_H
+
+#include <asm/types.h>
+#undef __STRICT_ANSI__
+#ifndef __u64 //required by videodev.h
+#define __u64 unsigned long long
+#endif // __u64
+
+#ifndef __s64 //required by videodev.h
+#define __s64 long long
+#endif // __s64
+
+#include <qstring.h>
+#include <kdebug.h>
+#include <qvaluevector.h>
+#include "kopete_export.h"
+
+namespace Kopete {
+
+namespace AV {
+
+typedef enum
+{
+ CONTROLTYPE_INTEGER = 0,
+ CONTROLTYPE_BOOLEAN = 1,
+ CONTROLTYPE_MENU = 2,
+ CONTROLTYPE_BUTTON = 3
+} control_type;
+
+typedef enum
+{
+ CONTROLFLAG_DISABLED = (1 << 0), // This control is permanently disabled and should be ignored by the application.
+	CONTROLFLAG_GRABBED   = (1 << 1), // This control is temporarily unchangeable.
+	CONTROLFLAG_READONLY  = (1 << 2), // This control is permanently read-only.
+ CONTROLFLAG__UPDATE = (1 << 3), // Changing this control may affect the value of other controls within the same control class.
+ CONTROLFLAG_INACTIVE = (1 << 4), // This control is not applicable to the current configuration.
+ CONTROLFLAG_SLIDER = (1 << 5) // This control is best represented as a slider.
+} control_flag;
+/**
+ @author Kopete Developers <[email protected]>
+*/
+class VideoControl{
+public:
+ VideoControl();
+ ~VideoControl();
+
+protected:
+ __u32 m_id;
+ control_type m_type;
+ QString m_name;
+ __s32 m_minimum;
+ __s32 m_maximum;
+ __s32 m_step;
+ __s32 m_default;
+ __u32 m_flags;
+};
+
+}
+
+}
+
+#endif
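The control_type and control_flag values above mirror the V4L2 control types and V4L2_CTRL_FLAG_* bits. As declared here the class exposes no public setters, so the following is only a hypothetical sketch (the QueriedVideoControl subclass is not part of this file) of how its members are expected to be filled from a v4l2_queryctrl description:

	#include <linux/videodev2.h>
	#include "videocontrol.h"

	using namespace Kopete::AV;

	class QueriedVideoControl : public VideoControl
	{
	public:
		explicit QueriedVideoControl(const struct v4l2_queryctrl &qc)
		{
			m_id      = qc.id;
			m_name    = QString::fromLocal8Bit((const char*)qc.name);
			m_minimum = qc.minimum;
			m_maximum = qc.maximum;
			m_step    = qc.step;
			m_default = qc.default_value;
			m_flags   = qc.flags;	// V4L2_CTRL_FLAG_* bits, mirrored by control_flag
			switch (qc.type)
			{
				case V4L2_CTRL_TYPE_BOOLEAN: m_type = CONTROLTYPE_BOOLEAN; break;
				case V4L2_CTRL_TYPE_MENU:    m_type = CONTROLTYPE_MENU;    break;
				case V4L2_CTRL_TYPE_BUTTON:  m_type = CONTROLTYPE_BUTTON;  break;
				default:                     m_type = CONTROLTYPE_INTEGER; break;
			}
		}
	};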
diff --git a/kopete/libkopete/avdevice/videodevice.cpp b/kopete/libkopete/avdevice/videodevice.cpp
new file mode 100644
index 00000000..ada02ae5
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevice.cpp
@@ -0,0 +1,2752 @@
+/*
+ videodevice.cpp - Kopete Video Device Low-level Support
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#define ENABLE_AV
+
+#include <cstdlib>
+#include <cerrno>
+#include <cstring>
+
+#include <kdebug.h>
+
+#include "videoinput.h"
+#include "videodevice.h"
+
+#include "bayer.h"
+#include "sonix_compress.h"
+
+#define CLEAR(x) memset (&(x), 0, sizeof (x))
+
+namespace Kopete {
+
+namespace AV {
+
+VideoDevice::VideoDevice()
+{
+// kdDebug(14010) << "libkopete (avdevice): VideoDevice() called" << endl;
+ descriptor = -1;
+ m_streambuffers = 0;
+ m_current_input = 0;
+// kdDebug(14010) << "libkopete (avdevice): VideoDevice() exited successfuly" << endl;
+ maxwidth = 32767;
+ maxheight = 32767;
+ minwidth = 1;
+ minheight = 1;
+}
+
+
+VideoDevice::~VideoDevice()
+{
+}
+
+
+
+
+
+
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+
+void VideoDevice::enumerateMenu (void)
+{
+ kdDebug(14010) << k_funcinfo << " Menu items:" << endl;
+
+ memset (&querymenu, 0, sizeof (querymenu));
+ querymenu.id = queryctrl.id;
+
+ for (querymenu.index = queryctrl.minimum; querymenu.index <= queryctrl.maximum; querymenu.index++)
+ {
+ if (0 == xioctl (VIDIOC_QUERYMENU, &querymenu))
+ {
+ kdDebug(14010) << k_funcinfo << " " << QString::fromLocal8Bit((const char*)querymenu.name) << endl;
+ }
+ else
+ {
+ perror ("VIDIOC_QUERYMENU");
+ exit (EXIT_FAILURE);
+ }
+ }
+}
+
+
+#endif
+
+
+
+
+
+/*!
+ \fn VideoDevice::xioctl(int fd, int request, void *arg)
+ */
+int VideoDevice::xioctl(int request, void *arg)
+{
+ int r;
+
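+	// Restart the ioctl if it was interrupted by a signal before completing (EINTR).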
+ do r = ioctl (descriptor, request, arg);
+ while (-1 == r && EINTR == errno);
+ return r;
+}
+
+/*!
+ \fn VideoDevice::errnoReturn(const char* s)
+ */
+int VideoDevice::errnoReturn(const char* s)
+{
+ /// @todo implement me
+ fprintf (stderr, "%s error %d, %s\n",s, errno, strerror (errno));
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn VideoDevice::setFileName(QString name)
+ */
+int VideoDevice::setFileName(QString filename)
+{
+ /// @todo implement me
+ full_filename=filename;
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn VideoDevice::open()
+ */
+int VideoDevice::open()
+{
+ /// @todo implement me
+
+ kdDebug(14010) << k_funcinfo << "called" << endl;
+ if(-1 != descriptor)
+ {
+ kdDebug(14010) << k_funcinfo << "Device is already open" << endl;
+ return EXIT_SUCCESS;
+ }
+ descriptor = ::open (QFile::encodeName(full_filename), O_RDWR, 0);
+ if(isOpen())
+ {
+		kdDebug(14010) << k_funcinfo << "File " << full_filename << " was opened successfully" << endl;
+		if(EXIT_FAILURE==checkDevice())
+		{
+			kdDebug(14010) << k_funcinfo << "File " << full_filename << " does not look like a usable video device" << endl;
+ close();
+ return EXIT_FAILURE;
+ }
+ }
+ else
+ {
+		kdDebug(14010) << k_funcinfo << "Unable to open file " << full_filename << ". Err: " << errno << endl;
+ return EXIT_FAILURE;
+ }
+
+ initDevice();
+ selectInput(m_current_input);
+	kdDebug(14010) << k_funcinfo << "exited successfully" << endl;
+ return EXIT_SUCCESS;
+}
+
+bool VideoDevice::isOpen()
+{
+ if(-1 == descriptor)
+ {
+// kdDebug(14010) << k_funcinfo << "VideoDevice::isOpen() File is not open" << endl;
+ return false;
+ }
+// kdDebug(14010) << k_funcinfo << "VideoDevice::isOpen() File is open" << endl;
+ return true;
+}
+
+int VideoDevice::checkDevice()
+{
+ kdDebug(14010) << k_funcinfo << "checkDevice() called." << endl;
+ if(isOpen())
+ {
+ m_videocapture=false;
+ m_videochromakey=false;
+ m_videoscale=false;
+ m_videooverlay=false;
+ m_videoread=false;
+ m_videoasyncio=false;
+ m_videostream=false;
+
+ m_driver=VIDEODEV_DRIVER_NONE;
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+
+//if(!getWorkaroundBrokenDriver())
+{
+ kdDebug(14010) << k_funcinfo << "checkDevice(): " << full_filename << " Trying V4L2 API." << endl;
+ CLEAR(V4L2_capabilities);
+
+ if (-1 != xioctl (VIDIOC_QUERYCAP, &V4L2_capabilities))
+ {
+ if (!(V4L2_capabilities.capabilities & V4L2_CAP_VIDEO_CAPTURE))
+ {
+ kdDebug(14010) << k_funcinfo << "checkDevice(): " << full_filename << " is not a video capture device." << endl;
+ m_driver = VIDEODEV_DRIVER_NONE;
+ return EXIT_FAILURE;
+ }
+ m_videocapture=true;
+ kdDebug(14010) << k_funcinfo << "checkDevice(): " << full_filename << " is a V4L2 device." << endl;
+ m_driver = VIDEODEV_DRIVER_V4L2;
+ m_model=QString::fromLocal8Bit((const char*)V4L2_capabilities.card);
+
+
+// Detect maximum and minimum resolution supported by the V4L2 device
+ CLEAR (fmt);
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl (VIDIOC_G_FMT, &fmt))
+ kdDebug(14010) << k_funcinfo << "VIDIOC_G_FMT failed (" << errno << ")." << endl;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
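+		// Ask for an oversized frame; VIDIOC_S_FMT adjusts width/height down to the largest size the driver supports.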
+ fmt.fmt.pix.width = 32767;
+ fmt.fmt.pix.height = 32767;
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+ if (-1 == xioctl (VIDIOC_S_FMT, &fmt))
+ {
+			kdDebug(14010) << k_funcinfo << "Detecting maximum size with VIDIOC_S_FMT failed (" << errno << "). Returned size: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ // Note VIDIOC_S_FMT may change width and height.
+ }
+ else
+ {
+ maxwidth = fmt.fmt.pix.width;
+ maxheight = fmt.fmt.pix.height;
+ }
+ if (-1 == xioctl (VIDIOC_G_FMT, &fmt))
+ kdDebug(14010) << k_funcinfo << "VIDIOC_G_FMT failed (" << errno << ")." << endl;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmt.fmt.pix.width = 1;
+ fmt.fmt.pix.height = 1;
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+ if (-1 == xioctl (VIDIOC_S_FMT, &fmt))
+ {
+			kdDebug(14010) << k_funcinfo << "Detecting minimum size with VIDIOC_S_FMT failed (" << errno << "). Returned size: " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ // Note VIDIOC_S_FMT may change width and height.
+ }
+ else
+ {
+ minwidth = fmt.fmt.pix.width;
+ minheight = fmt.fmt.pix.height;
+ }
+
+// Buggy driver paranoia
+/* min = fmt.fmt.pix.width * 2;
+ if (fmt.fmt.pix.bytesperline < min)
+ fmt.fmt.pix.bytesperline = min;
+ min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
+ if (fmt.fmt.pix.sizeimage < min)
+ fmt.fmt.pix.sizeimage = min;
+ m_buffer_size=fmt.fmt.pix.sizeimage ;*/
+
+ int inputisok=EXIT_SUCCESS;
+ m_input.clear();
+ for(unsigned int loop=0; inputisok==EXIT_SUCCESS; loop++)
+ {
+ struct v4l2_input videoinput;
+ CLEAR(videoinput);
+ videoinput.index = loop;
+ inputisok=xioctl(VIDIOC_ENUMINPUT, &videoinput);
+ if(inputisok==EXIT_SUCCESS)
+ {
+ VideoInput tempinput;
+ tempinput.name = QString::fromLocal8Bit((const char*)videoinput.name);
+ tempinput.hastuner = videoinput.type & V4L2_INPUT_TYPE_TUNER;
+ tempinput.m_standards = videoinput.std;
+ m_input.push_back(tempinput);
+ kdDebug(14010) << k_funcinfo << "Input " << loop << ": " << tempinput.name << " (tuner: " << ((videoinput.type & V4L2_INPUT_TYPE_TUNER) != 0) << ")" << endl;
+ if((videoinput.type & V4L2_INPUT_TYPE_TUNER) != 0)
+ {
+// _tunerForInput[name] = desc.tuner;
+// _isTuner = true;
+ }
+ else
+ {
+// _tunerForInput[name] = -1;
+ }
+ }
+ }
+
+
+
+
+// -----------------------------------------------------------------------------------------------------------------
+// TODO: This must be turned into a proper method for checking whether a given control exists.
+CLEAR (queryctrl);
+// v4l2_queryctrl may zero the .id in some cases, even if the IOCTL returns EXIT_SUCCESS (tested with a bttv card, when testing for V4L2_CID_AUDIO_VOLUME).
+// As of 6th Aug 2007, according to the V4L2 specification version 0.21, this behavior is undocumented, and the example 1-8 code found at
+// http://www.linuxtv.org/downloads/video4linux/API/V4L2_API/spec/x519.htm fails because of this behavior with a bttv card.
+
+int currentid = V4L2_CID_BASE;
+
+kdDebug(14010) << k_funcinfo << "Checking CID controls" << endl;
+
+for (currentid = V4L2_CID_BASE; currentid < V4L2_CID_LASTP1; currentid++)
+//for (queryctrl.id = 9963776; queryctrl.id < 9963800; queryctrl.id++)
+{
+ queryctrl.id = currentid;
+//kdDebug(14010) << k_funcinfo << "Checking CID controls from " << V4L2_CID_BASE << " to " << V4L2_CID_LASTP1 << ". Current: " << queryctrl.id << ". IOCTL returns: " << resultado << endl;
+ if (0 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ continue;
+
+//kdDebug(14010) << k_funcinfo << " Control: " << QString::fromLocal8Bit((const char*)queryctrl.name) << endl;
+kdDebug(14010) << k_funcinfo << " Control: " << QString::fromLocal8Bit((const char*)queryctrl.name) << " Values from " << queryctrl.minimum << " to " << queryctrl.maximum << " with steps of " << queryctrl.step << ". Default: " << queryctrl.default_value << endl;
+
+/* switch (queryctrl.type)
+ {
+ case V4L2_CTRL_TYPE_INTEGER :
+ }*/
+ if (queryctrl.type == V4L2_CTRL_TYPE_MENU)
+ enumerateMenu ();
+ }
+ else
+ {
+ if (errno == EINVAL)
+ continue;
+
+ perror ("VIDIOC_QUERYCTRL");
+// exit (EXIT_FAILURE);
+ }
+}
+
+kdDebug(14010) << k_funcinfo << "Checking CID private controls" << endl;
+
+for (currentid = V4L2_CID_PRIVATE_BASE;; currentid++)
+//for (queryctrl.id = 9963776; queryctrl.id < 9963800; queryctrl.id++)
+{
+ queryctrl.id = currentid;
+//kdDebug(14010) << k_funcinfo << "Checking CID private controls from " << V4L2_CID_PRIVATE_BASE << ". Current: " << queryctrl.id << ". IOCTL returns: " << resultado << endl;
+ if ( 0 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ continue;
+
+kdDebug(14010) << k_funcinfo << " Control: " << QString::fromLocal8Bit((const char*)queryctrl.name) << " Values from " << queryctrl.minimum << " to " << queryctrl.maximum << " with steps of " << queryctrl.step << ". Default: " << queryctrl.default_value << endl;
+
+ if (queryctrl.type == V4L2_CTRL_TYPE_MENU)
+ enumerateMenu ();
+ }
+ else
+ {
+ if (errno == EINVAL)
+ break;
+
+ perror ("VIDIOC_QUERYCTRL");
+// exit (EXIT_FAILURE);
+ }
+}
+
+
+
+
+ }
+ else
+ {
+// V4L-only drivers should return EINVAL in errno to indicate that they cannot handle V4L2 calls. Not every driver is compliant, so
+// the V4L API will be tried below even if the returned error code differs from the expected one.
+ kdDebug(14010) << k_funcinfo << "checkDevice(): " << full_filename << " is not a V4L2 device." << endl;
+ }
+
+}
+#endif
+
+ CLEAR(V4L_capabilities);
+
+ if(m_driver==VIDEODEV_DRIVER_NONE)
+ {
+ kdDebug(14010) << k_funcinfo << "checkDevice(): " << full_filename << " Trying V4L API." << endl;
+ if (-1 == xioctl (VIDIOCGCAP, &V4L_capabilities))
+ {
+ perror ("ioctl (VIDIOCGCAP)");
+ m_driver = VIDEODEV_DRIVER_NONE;
+ return EXIT_FAILURE;
+ }
+ else
+ {
+ kdDebug(14010) << k_funcinfo << full_filename << " is a V4L device." << endl;
+ m_driver = VIDEODEV_DRIVER_V4L;
+ m_model=QString::fromLocal8Bit((const char*)V4L_capabilities.name);
+ if(V4L_capabilities.type & VID_TYPE_CAPTURE)
+ m_videocapture=true;
+ if(V4L_capabilities.type & VID_TYPE_CHROMAKEY)
+ m_videochromakey=true;
+ if(V4L_capabilities.type & VID_TYPE_SCALES)
+ m_videoscale=true;
+ if(V4L_capabilities.type & VID_TYPE_OVERLAY)
+ m_videooverlay=true;
+// kdDebug(14010) << "libkopete (avdevice): Inputs : " << V4L_capabilities.channels << endl;
+// kdDebug(14010) << "libkopete (avdevice): Audios : " << V4L_capabilities.audios << endl;
+ minwidth = V4L_capabilities.minwidth;
+ maxwidth = V4L_capabilities.maxwidth;
+ minheight = V4L_capabilities.minheight;
+ maxheight = V4L_capabilities.maxheight;
+
+
+ int inputisok=EXIT_SUCCESS;
+ m_input.clear();
+ for(int loop=0; loop < V4L_capabilities.channels; loop++)
+ {
+ struct video_channel videoinput;
+ CLEAR(videoinput);
+ videoinput.channel = loop;
+ videoinput.norm = 1;
+ inputisok=xioctl(VIDIOCGCHAN, &videoinput);
+ if(inputisok==EXIT_SUCCESS)
+ {
+ VideoInput tempinput;
+ tempinput.name = QString::fromLocal8Bit((const char*)videoinput.name);
+ tempinput.hastuner=videoinput.flags & VIDEO_VC_TUNER;
+// TODO: The routine to detect the appropriate video standards for V4L must be placed here
+ m_input.push_back(tempinput);
+// kdDebug(14010) << "libkopete (avdevice): Input " << loop << ": " << tempinput.name << " (tuner: " << ((videoinput.flags & VIDEO_VC_TUNER) != 0) << ")" << endl;
+/* if((input.type & V4L2_INPUT_TYPE_TUNER) != 0)
+ {
+// _tunerForInput[name] = desc.tuner;
+// _isTuner = true;
+ }
+ else
+ {
+// _tunerForInput[name] = -1;
+ }
+*/ }
+ }
+
+ }
+ }
+#endif
+	m_name=m_model; // TODO: the device name should eventually be made distinguishable from the model itself.
+
+ detectPixelFormats();
+
+// TODO: Now we must execute the proper initialization according to the type of the driver.
+		kdDebug(14010) << k_funcinfo << "checkDevice() exited successfully." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+
+/*!
+ \fn VideoDevice::showDeviceCapabilities()
+ */
+int VideoDevice::showDeviceCapabilities()
+{
+ kdDebug(14010) << k_funcinfo << "showDeviceCapabilities() called." << endl;
+ if(isOpen())
+ {
+/* kdDebug(14010) << "libkopete (avdevice): Driver: " << (const char*)V4L2_capabilities.driver << " "
+ << ((V4L2_capabilities.version>>16) & 0xFF) << "."
+ << ((V4L2_capabilities.version>> 8) & 0xFF) << "."
+ << ((V4L2_capabilities.version ) & 0xFF) << endl;
+ kdDebug(14010) << "libkopete (avdevice): Card: " << name << endl;
+ kdDebug(14010) << "libkopete (avdevice): Capabilities:" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_VIDEO_CAPTURE)
+ kdDebug(14010) << "libkopete (avdevice): Video capture" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_VIDEO_OUTPUT)
+ kdDebug(14010) << "libkopete (avdevice): Video output" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_VIDEO_OVERLAY)
+ kdDebug(14010) << "libkopete (avdevice): Video overlay" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_VBI_CAPTURE)
+ kdDebug(14010) << "libkopete (avdevice): VBI capture" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_VBI_OUTPUT)
+ kdDebug(14010) << "libkopete (avdevice): VBI output" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_RDS_CAPTURE)
+ kdDebug(14010) << "libkopete (avdevice): RDS capture" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_TUNER)
+ kdDebug(14010) << "libkopete (avdevice): Tuner IO" << endl;
+ if(V4L2_capabilities.capabilities & V4L2_CAP_AUDIO)
+ kdDebug(14010) << "libkopete (avdevice): Audio IO" << endl;
+;*/
+ kdDebug(14010) << k_funcinfo << "Card model: " << m_model << endl;
+ kdDebug(14010) << k_funcinfo << "Card name : " << m_name << endl;
+ kdDebug(14010) << k_funcinfo << "Capabilities:" << endl;
+ if(canCapture())
+ kdDebug(14010) << k_funcinfo << " Video capture" << endl;
+ if(canRead())
+ kdDebug(14010) << k_funcinfo << " Read" << endl;
+ if(canAsyncIO())
+ kdDebug(14010) << k_funcinfo << " Asynchronous input/output" << endl;
+ if(canStream())
+ kdDebug(14010) << k_funcinfo << " Streaming" << endl;
+ if(canChromakey())
+ kdDebug(14010) << k_funcinfo << " Video chromakey" << endl;
+ if(canScale())
+ kdDebug(14010) << k_funcinfo << " Video scales" << endl;
+ if(canOverlay())
+ kdDebug(14010) << k_funcinfo << " Video overlay" << endl;
+// kdDebug(14010) << "libkopete (avdevice): Audios : " << V4L_capabilities.audios << endl;
+ kdDebug(14010) << k_funcinfo << " Max res: " << maxWidth() << " x " << maxHeight() << endl;
+ kdDebug(14010) << k_funcinfo << " Min res: " << minWidth() << " x " << minHeight() << endl;
+ kdDebug(14010) << k_funcinfo << " Inputs : " << inputs() << endl;
+ for (unsigned int loop=0; loop < inputs(); loop++)
+ kdDebug(14010) << k_funcinfo << "Input " << loop << ": " << m_input[loop].name << " (tuner: " << m_input[loop].hastuner << ")" << endl;
+		kdDebug(14010) << k_funcinfo << "showDeviceCapabilities() exited successfully." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+    \fn VideoDevice::initDevice()
+ */
+int VideoDevice::initDevice()
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "initDevice() started" << endl;
+ if(-1 == descriptor)
+ {
+ kdDebug(14010) << k_funcinfo << "initDevice() Device is not open" << endl;
+ return EXIT_FAILURE;
+ }
+ m_io_method = IO_METHOD_NONE;
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ if(V4L2_capabilities.capabilities & V4L2_CAP_READWRITE)
+ {
+ m_videoread=true;
+ m_io_method = IO_METHOD_READ;
+ kdDebug(14010) << k_funcinfo << " Read/Write interface" << endl;
+ }
+ if(V4L2_capabilities.capabilities & V4L2_CAP_ASYNCIO)
+ {
+ m_videoasyncio=true;
+ kdDebug(14010) << k_funcinfo << " Async IO interface" << endl;
+ }
+ if(V4L2_capabilities.capabilities & V4L2_CAP_STREAMING)
+ {
+ m_videostream=true;
+ m_io_method = IO_METHOD_MMAP;
+// m_io_method = IO_METHOD_USERPTR;
+ kdDebug(14010) << k_funcinfo << " Streaming interface" << endl;
+ }
+ if(m_io_method==IO_METHOD_NONE)
+ {
+ kdDebug(14010) << k_funcinfo << "initDevice() Found no suitable input/output method for " << full_filename << endl;
+ return EXIT_FAILURE;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ m_videoread=true;
+ m_io_method=IO_METHOD_READ;
+ if(-1 != xioctl(VIDIOCGFBUF,&V4L_videobuffer))
+ {
+// m_videostream=true;
+// m_io_method = IO_METHOD_MMAP;
+ kdDebug(14010) << k_funcinfo << " Streaming interface" << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+
+ break;
+ }
+
+// Select video input, video standard and tune here.
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl (VIDIOC_CROPCAP, &cropcap))
+ { // Errors ignored.
+ }
+ crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ crop.c = cropcap.defrect; // reset to default
+ if (-1 == xioctl (VIDIOC_S_CROP, &crop))
+ {
+ switch (errno)
+ {
+ case EINVAL: break; // Cropping not supported.
+ default: break; // Errors ignored.
+ }
+ }
+#endif
+#endif
+
+ showDeviceCapabilities();
+	kdDebug(14010) << k_funcinfo << "initDevice() exited successfully" << endl;
+ return EXIT_SUCCESS;
+}
+
+unsigned int VideoDevice::inputs()
+{
+ return m_input.size();
+}
+
+
+int VideoDevice::width()
+{
+ return currentwidth;
+}
+
+int VideoDevice::minWidth()
+{
+ return minwidth;
+}
+
+int VideoDevice::maxWidth()
+{
+ return maxwidth;
+}
+
+int VideoDevice::height()
+{
+ return currentheight;
+}
+
+int VideoDevice::minHeight()
+{
+ return minheight;
+}
+
+int VideoDevice::maxHeight()
+{
+ return maxheight;
+}
+
+int VideoDevice::setSize( int newwidth, int newheight)
+{
+kdDebug(14010) << k_funcinfo << "setSize(" << newwidth << ", " << newheight << ") called." << endl;
+ if(isOpen())
+ {
+// This should not be here. It must live in a completely distinct place, because this method should not change the pixel format.
+		kdDebug(14010) << k_funcinfo << "Trying YUV422P" << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_YUV422P))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support YUV422P format. Trying YUYV." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_YUYV))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support YUYV format. Trying UYVY." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_UYVY))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support UYVY format. Trying YUV420P." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_YUV420P))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support YUV420P format. Trying RGB24." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_RGB24))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support RGB24 format. Trying BGR24." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_BGR24))
+ {
+									kdDebug(14010) << k_funcinfo << "Card doesn't seem to support BGR24 format. Trying RGB32." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_RGB32))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support RGB32 format. Trying BGR32." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_BGR32))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support BGR32 format. Trying SN9C10X." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_SN9C10X))
+ {
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support SN9C10X format. Trying Bayer RGB." << endl;
+ if(PIXELFORMAT_NONE == setPixelFormat(PIXELFORMAT_SBGGR8))
+ kdDebug(14010) << k_funcinfo << "Card doesn't seem to support SBGGR8 format. Fallback from it is not yet implemented." << endl;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if(newwidth > maxwidth ) newwidth = maxwidth;
+ if(newheight > maxheight) newheight = maxheight;
+ if(newwidth < minwidth ) newwidth = minwidth;
+ if(newheight < minheight) newheight = minheight;
+
+ currentwidth = newwidth;
+ currentheight = newheight;
+
+//kdDebug(14010) << k_funcinfo << "width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << width() << "x" << height() << endl;
+// Change resolution for the video device
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+// CLEAR (fmt);
+ if (-1 == xioctl (VIDIOC_G_FMT, &fmt))
+			kdDebug(14010) << k_funcinfo << "VIDIOC_G_FMT failed (" << errno << "). Returned width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmt.fmt.pix.width = width();
+ fmt.fmt.pix.height = height();
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+ if (-1 == xioctl (VIDIOC_S_FMT, &fmt))
+ {
+				kdDebug(14010) << k_funcinfo << "VIDIOC_S_FMT failed (" << errno << "). Returned width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ // Note VIDIOC_S_FMT may change width and height.
+ }
+ else
+ {
+// Buggy driver paranoia.
+kdDebug(14010) << k_funcinfo << "VIDIOC_S_FMT worked. Returned width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ unsigned int min = fmt.fmt.pix.width * 2;
+ if (fmt.fmt.pix.bytesperline < min)
+ fmt.fmt.pix.bytesperline = min;
+ min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
+ if (fmt.fmt.pix.sizeimage < min)
+ fmt.fmt.pix.sizeimage = min;
+ m_buffer_size=fmt.fmt.pix.sizeimage ;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_window V4L_videowindow;
+
+			if (xioctl (VIDIOCGWIN, &V4L_videowindow)== -1)
+			{
+				perror ("ioctl VIDIOCGWIN");
+//				return (NULL);
+			}
+
+kdDebug(14010) << "------------- width: " << V4L_videowindow.width << " Height: " << V4L_videowindow.height << " Clipcount: " << V4L_videowindow.clipcount << " -----------------" << endl;
+ V4L_videowindow.width = width();
+ V4L_videowindow.height = height();
+ V4L_videowindow.clipcount=0;
+ if (xioctl (VIDIOCSWIN, &V4L_videowindow)== -1)
+ {
+ perror ("ioctl VIDIOCSWIN");
+// return (NULL);
+ }
+kdDebug(14010) << "------------- width: " << V4L_videowindow.width << " Height: " << V4L_videowindow.height << " Clipcount: " << V4L_videowindow.clipcount << " -----------------" << endl;
+
+// kdDebug(14010) << "libkopete (avdevice): V4L_picture.palette: " << V4L_picture.palette << " Depth: " << V4L_picture.depth << endl;
+
+/* if(-1 == xioctl(VIDIOCGFBUF,&V4L_videobuffer))
+ kdDebug(14010) << "libkopete (avdevice): VIDIOCGFBUF failed (" << errno << "): Card cannot stream" << endl;*/
+
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ m_buffer_size = width() * height() * pixelFormatDepth(m_pixelformat) / 8;
+kdDebug(14010) << "------------------------- ------- -- m_buffer_size: " << m_buffer_size << " !!! -- ------- -----------------------------------------" << endl;
+
+ m_currentbuffer.pixelformat=m_pixelformat;
+ m_currentbuffer.data.resize(m_buffer_size);
+
+ switch (m_io_method)
+ {
+ case IO_METHOD_NONE: break;
+ case IO_METHOD_READ: initRead (); break;
+ case IO_METHOD_MMAP: initMmap (); break;
+ case IO_METHOD_USERPTR: initUserptr (); break;
+ }
+
+kdDebug(14010) << k_funcinfo << "setSize(" << newwidth << ", " << newheight << ") exited successfully." << endl;
+ return EXIT_SUCCESS;
+ }
+kdDebug(14010) << k_funcinfo << "setSize(" << newwidth << ", " << newheight << ") Device is not open." << endl;
+ return EXIT_FAILURE;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+pixel_format VideoDevice::setPixelFormat(pixel_format newformat)
+{
+ pixel_format ret = PIXELFORMAT_NONE;
+//kdDebug(14010) << k_funcinfo << "called." << endl;
+// Change the pixel format for the video device
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+// CLEAR (fmt);
+ if (-1 == xioctl (VIDIOC_G_FMT, &fmt))
+ {
+// return errnoReturn ("VIDIOC_S_FMT");
+// kdDebug(14010) << k_funcinfo << "VIDIOC_G_FMT failed (" << errno << ").Returned width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ }
+ else
+ m_pixelformat = pixelFormatForPalette(fmt.fmt.pix.pixelformat);
+
+ fmt.fmt.pix.pixelformat = pixelFormatCode(newformat);
+ if (-1 == xioctl (VIDIOC_S_FMT, &fmt))
+ {
+// kdDebug(14010) << k_funcinfo << "VIDIOC_S_FMT failed (" << errno << ").Returned width: " << pixelFormatName(fmt.fmt.pix.pixelformat) << " " << fmt.fmt.pix.width << "x" << fmt.fmt.pix.height << endl;
+ }
+ else
+ {
+ if (fmt.fmt.pix.pixelformat == pixelFormatCode(newformat))
+ {
+ m_pixelformat = newformat;
+ ret = m_pixelformat;
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+// kdDebug(14010) << k_funcinfo << "V4L_picture.palette: " << V4L_picture.palette << " Depth: " << V4L_picture.depth << endl;
+ V4L_picture.palette = pixelFormatCode(newformat);
+ V4L_picture.depth = pixelFormatDepth(newformat);
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+ {
+// kdDebug(14010) << k_funcinfo << "Card seems to not support " << pixelFormatName(newformat) << " format. Fallback to it is not yet implemented." << endl;
+ }
+
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+
+// kdDebug(14010) << k_funcinfo << "V4L_picture.palette: " << V4L_picture.palette << " Depth: " << V4L_picture.depth << endl;
+ m_pixelformat=pixelFormatForPalette(V4L_picture.palette);
+ if (m_pixelformat == newformat)
+ ret = newformat;
+
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return ret;
+}
+
+
+
+
+
+
+/*!
+ \fn Kopete::AV::VideoDevice::currentInput()
+ */
+int VideoDevice::currentInput()
+{
+ /// @todo implement me
+ if(isOpen())
+ {
+ return m_current_input;
+ }
+ return 0;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevice::selectInput(int input)
+ */
+int VideoDevice::selectInput(int newinput)
+{
+ /// @todo implement me
+	if(newinput < 0 || (unsigned int) newinput >= inputs())
+		return EXIT_FAILURE;
+
+ if(isOpen())
+ {
+ switch (m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ if (-1 == ioctl (descriptor, VIDIOC_S_INPUT, &newinput))
+ {
+ perror ("VIDIOC_S_INPUT");
+ return EXIT_FAILURE;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ struct video_channel V4L_input;
+ V4L_input.channel=newinput;
+			V4L_input.norm=4; // FIXME: hard-coded norm; it should be the selected input's actual signal standard.
+ if (-1 == ioctl (descriptor, VIDIOCSCHAN, &V4L_input))
+ {
+ perror ("ioctl (VIDIOCSCHAN)");
+ return EXIT_FAILURE;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ kdDebug(14010) << k_funcinfo << "Selected input " << newinput << " (" << m_input[newinput].name << ")" << endl;
+ m_current_input = newinput;
+ setInputParameters();
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevice::setInputParameters()
+ */
+int VideoDevice::setInputParameters()
+{
+ /// @todo implement me
+ if( (isOpen()) && (m_current_input < inputs() ) )
+ {
+ setBrightness( getBrightness() );
+ setContrast( getContrast() );
+ setSaturation( getSaturation() );
+ setWhiteness( getWhiteness() );
+ setHue( getHue() );
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn VideoDevice::startCapturing()
+ */
+int VideoDevice::startCapturing()
+{
+
+ kdDebug(14010) << k_funcinfo << "called." << endl;
+ if(isOpen())
+ {
+ switch (m_io_method)
+ {
+ case IO_METHOD_NONE: // Card cannot capture frames
+ return EXIT_FAILURE;
+ break;
+ case IO_METHOD_READ: // Nothing to do
+ break;
+ case IO_METHOD_MMAP:
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ {
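+			// Queue every mmap'ed buffer with VIDIOC_QBUF, then start the capture stream with VIDIOC_STREAMON.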
+ unsigned int loop;
+ for (loop = 0; loop < m_streambuffers; ++loop)
+ {
+ struct v4l2_buffer buf;
+ CLEAR (buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = loop;
+ if (-1 == xioctl (VIDIOC_QBUF, &buf))
+ return errnoReturn ("VIDIOC_QBUF");
+ }
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl (VIDIOC_STREAMON, &type))
+ return errnoReturn ("VIDIOC_STREAMON");
+ }
+#endif
+#endif
+ break;
+ case IO_METHOD_USERPTR:
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ {
+ unsigned int loop;
+ for (loop = 0; loop < m_streambuffers; ++loop)
+ {
+ struct v4l2_buffer buf;
+ CLEAR (buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+ buf.m.userptr = (unsigned long) m_rawbuffers[loop].start;
+ buf.length = m_rawbuffers[loop].length;
+ if (-1 == xioctl (VIDIOC_QBUF, &buf))
+ return errnoReturn ("VIDIOC_QBUF");
+ }
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl (VIDIOC_STREAMON, &type))
+ return errnoReturn ("VIDIOC_STREAMON");
+ }
+#endif
+#endif
+ break;
+ }
+
+		kdDebug(14010) << k_funcinfo << "exited successfully." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn VideoDevice::getFrame()
+ */
+int VideoDevice::getFrame()
+{
+ /// @todo implement me
+ ssize_t bytesread;
+
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ struct v4l2_buffer v4l2buffer;
+#endif
+#endif
+// kdDebug(14010) << k_funcinfo << "getFrame() called." << endl;
+ if(isOpen())
+ {
+ switch (m_io_method)
+ {
+ case IO_METHOD_NONE: // Card cannot capture frames
+ return EXIT_FAILURE;
+ break;
+ case IO_METHOD_READ:
+// kdDebug(14010) << k_funcinfo << "Using IO_METHOD_READ.File descriptor: " << descriptor << " Buffer address: " << &m_currentbuffer.data[0] << " Size: " << m_currentbuffer.data.size() << endl;
+ bytesread = read (descriptor, &m_currentbuffer.data[0], m_currentbuffer.data.size());
+ if (-1 == bytesread) // must verify this point with ov511 driver.
+ {
+ kdDebug(14010) << k_funcinfo << "IO_METHOD_READ failed." << endl;
+ switch (errno)
+ {
+ case EAGAIN:
+ return EXIT_FAILURE;
+ case EIO: /* Could ignore EIO, see spec. fall through */
+ default:
+ return errnoReturn ("read");
+ }
+ }
+			if (bytesread < (int) m_currentbuffer.data.size())
+			{
+				kdDebug(14010) << k_funcinfo << "IO_METHOD_READ returned fewer bytes (" << bytesread << ") than were asked for (" << m_currentbuffer.data.size() << ")." << endl;
+			}
+ break;
+ case IO_METHOD_MMAP:
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ CLEAR (v4l2buffer);
+ v4l2buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ v4l2buffer.memory = V4L2_MEMORY_MMAP;
+ if (-1 == xioctl (VIDIOC_DQBUF, &v4l2buffer))
+ {
+ kdDebug(14010) << k_funcinfo << full_filename << " MMAPed getFrame failed." << endl;
+ switch (errno)
+ {
+ case EAGAIN:
+ {
+						kdDebug(14010) << k_funcinfo << full_filename << " MMAPed getFrame failed: EAGAIN." << endl;
+ return EXIT_FAILURE;
+ }
+ case EIO: /* Could ignore EIO, see spec. fall through */
+ default:
+ return errnoReturn ("VIDIOC_DQBUF");
+ }
+ }
+/* if (v4l2buffer.index < m_streambuffers)
+ return EXIT_FAILURE;*/ //it was an assert()
+//kdDebug(14010) << k_funcinfo << "m_rawbuffers[" << v4l2buffer.index << "].start: " << (void *)m_rawbuffers[v4l2buffer.index].start << " Size: " << m_currentbuffer.data.size() << endl;
+
+
+
+/*{
+ unsigned long long result=0;
+ unsigned long long R=0, G=0, B=0, A=0;
+ int Rmax=0, Gmax=0, Bmax=0, Amax=0;
+ int Rmin=255, Gmin=255, Bmin=255, Amin=0;
+
+ for(unsigned int loop=0;loop < m_currentbuffer.data.size();loop+=4)
+ {
+ R+=m_rawbuffers[v4l2buffer.index].start[loop];
+ G+=m_rawbuffers[v4l2buffer.index].start[loop+1];
+ B+=m_rawbuffers[v4l2buffer.index].start[loop+2];
+// A+=currentbuffer.data[loop+3];
+ if (m_currentbuffer.data[loop] < Rmin) Rmin = m_currentbuffer.data[loop];
+ if (m_currentbuffer.data[loop+1] < Gmin) Gmin = m_currentbuffer.data[loop+1];
+ if (m_currentbuffer.data[loop+2] < Bmin) Bmin = m_currentbuffer.data[loop+2];
+// if (m_currentbuffer.data[loop+3] < Amin) Amin = m_currentbuffer.data[loop+3];
+ if (m_currentbuffer.data[loop] > Rmax) Rmax = m_currentbuffer.data[loop];
+ if (m_currentbuffer.data[loop+1] > Gmax) Gmax = m_currentbuffer.data[loop+1];
+ if (m_currentbuffer.data[loop+2] > Bmax) Bmax = m_currentbuffer.data[loop+2];
+// if (m_currentbuffer.data[loop+3] > Amax) Amax = m_currentbuffer.data[loop+3];
+ }
+ kdDebug(14010) << " R: " << R << " G: " << G << " B: " << B << " A: " << A <<
+ " Rmin: " << Rmin << " Gmin: " << Gmin << " Bmin: " << Bmin << " Amin: " << Amin <<
+ " Rmax: " << Rmax << " Gmax: " << Gmax << " Bmax: " << Bmax << " Amax: " << Amax << endl;
+}*/
+
+
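+// Copy the dequeued (filled) mmap'ed buffer into m_currentbuffer, then hand the buffer back to the driver with VIDIOC_QBUF.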
+memcpy(&m_currentbuffer.data[0], m_rawbuffers[v4l2buffer.index].start, m_currentbuffer.data.size());
+ if (-1 == xioctl (VIDIOC_QBUF, &v4l2buffer))
+ return errnoReturn ("VIDIOC_QBUF");
+#endif
+#endif
+ break;
+ case IO_METHOD_USERPTR:
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ {
+ unsigned int i;
+ CLEAR (v4l2buffer);
+ v4l2buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ v4l2buffer.memory = V4L2_MEMORY_USERPTR;
+ if (-1 == xioctl (VIDIOC_DQBUF, &v4l2buffer))
+ {
+ switch (errno)
+ {
+ case EAGAIN:
+ return EXIT_FAILURE;
+ case EIO: /* Could ignore EIO, see spec. fall through */
+ default:
+ return errnoReturn ("VIDIOC_DQBUF");
+ }
+ }
+ for (i = 0; i < m_streambuffers; ++i)
+ if (v4l2buffer.m.userptr == (unsigned long) m_rawbuffers[i].start && v4l2buffer.length == m_rawbuffers[i].length)
+ break;
+				if (i >= m_streambuffers) // the dequeued buffer must be one of the ones we queued
+					return EXIT_FAILURE;
+ if (-1 == xioctl (VIDIOC_QBUF, &v4l2buffer))
+ return errnoReturn ("VIDIOC_QBUF");
+ }
+#endif
+#endif
+ break;
+ }
+
+/* Automatic color correction. Now it just swaps R and B channels in RGB24/BGR24 modes.
+ if(m_input[m_current_input].getAutoColorCorrection())
+ {
+ switch(m_currentbuffer.pixelformat)
+ {
+ case PIXELFORMAT_NONE : break;
+ case PIXELFORMAT_GREY : break;
+ case PIXELFORMAT_RGB332 : break;
+ case PIXELFORMAT_RGB555 : break;
+ case PIXELFORMAT_RGB555X: break;
+ case PIXELFORMAT_RGB565 : break;
+ case PIXELFORMAT_RGB565X: break;
+ case PIXELFORMAT_RGB24 :
+ case PIXELFORMAT_BGR24 :
+ {
+ unsigned char temp;
+ for(unsigned int loop=0;loop < m_currentbuffer.data.size();loop+=3)
+ {
+ temp = m_currentbuffer.data[loop];
+ m_currentbuffer.data[loop] = m_currentbuffer.data[loop+2];
+ m_currentbuffer.data[loop+2] = temp;
+ }
+ }
+ break;
+ case PIXELFORMAT_RGB32 :
+ case PIXELFORMAT_BGR32 :
+ {
+ unsigned char temp;
+ for(unsigned int loop=0;loop < m_currentbuffer.data.size();loop+=4)
+ {
+ temp = m_currentbuffer.data[loop];
+ m_currentbuffer.data[loop] = m_currentbuffer.data[loop+2];
+ m_currentbuffer.data[loop+2] = temp;
+ }
+ }
+ break;
+ case PIXELFORMAT_YUYV : break;
+ case PIXELFORMAT_UYVY : break;
+ case PIXELFORMAT_YUV420P: break;
+ case PIXELFORMAT_YUV422P: break;
+ }
+ }*/
+//kdDebug(14010) << k_funcinfo << "10 Using IO_METHOD_READ.File descriptor: " << descriptor << " Buffer address: " << &m_currentbuffer.data[0] << " Size: " << m_currentbuffer.data.size() << endl;
+
+
+// put frame copy operation here
+// kdDebug(14010) << k_funcinfo << "exited successfuly." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn VideoDevice::getFrame(imagebuffer *imgbuffer)
+ */
+int VideoDevice::getFrame(imagebuffer *imgbuffer)
+{
+ if(imgbuffer)
+ {
+ getFrame();
+ imgbuffer->height = m_currentbuffer.height;
+ imgbuffer->width = m_currentbuffer.width;
+ imgbuffer->pixelformat = m_currentbuffer.pixelformat;
+ imgbuffer->data = m_currentbuffer.data;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+    \fn Kopete::AV::VideoDevice::getImage(QImage *qimage)
+ */
+int VideoDevice::getImage(QImage *qimage)
+{
+ /// @todo implement me
+
+ // do NOT delete qimage here, as it is received as a parameter
+ if (qimage->width() != width() || qimage->height() != height())
+ qimage->create(width(), height(),32, QImage::IgnoreEndian);
+
+ uchar *bits=qimage->bits();
+// kDebug() << "Capturing in " << pixelFormatName(m_currentbuffer.pixelformat);
+ switch(m_currentbuffer.pixelformat)
+ {
+ case PIXELFORMAT_NONE : break;
+
+// Packed RGB formats
+ case PIXELFORMAT_RGB332 : break;
+ case PIXELFORMAT_RGB444 : break;
+ case PIXELFORMAT_RGB555 : break;
+ case PIXELFORMAT_RGB565 :
+ {
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
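+				// Little-endian RGB565: byte 0 = GGGBBBBB, byte 1 = RRRRRGGG. The three expressions below
+				// rescale the 5/6-bit blue, green and red channels to roughly 8 bits; the fourth byte is opaque alpha.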
+ bits[loop] = (m_currentbuffer.data[step]<<3)+(m_currentbuffer.data[step]<<3>>5);
+ bits[loop+1] = ((m_currentbuffer.data[step+1])<<5)|m_currentbuffer.data[step]>>5;
+ bits[loop+2] = ((m_currentbuffer.data[step+1])&248)+((m_currentbuffer.data[step+1])>>5);
+ bits[loop+3] = 255;
+ step+=2;
+ }
+ }
+ break;
+ case PIXELFORMAT_RGB555X: break;
+ case PIXELFORMAT_RGB565X: break;
+ case PIXELFORMAT_BGR24 :
+ {
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = m_currentbuffer.data[step+2];
+ bits[loop+1] = m_currentbuffer.data[step+1];
+ bits[loop+2] = m_currentbuffer.data[step];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ }
+ break;
+ case PIXELFORMAT_RGB24 :
+ {
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = m_currentbuffer.data[step];
+ bits[loop+1] = m_currentbuffer.data[step+1];
+ bits[loop+2] = m_currentbuffer.data[step+2];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ }
+ break;
+ case PIXELFORMAT_BGR32 : break;
+ case PIXELFORMAT_RGB32 : memcpy(bits,&m_currentbuffer.data[0], m_currentbuffer.data.size());
+ break;
+
+// Bayer RGB format
+ case PIXELFORMAT_SBGGR8 :
+ {
+ unsigned char *d = (unsigned char *) malloc (width() * height() * 3);
+ bayer2rgb24(d, &m_currentbuffer.data.first(), width(), height());
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = d[step+2];
+ bits[loop+1] = d[step+1];
+ bits[loop+2] = d[step];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ free(d);
+ }
+ break;
+
+// YUV formats
+ case PIXELFORMAT_GREY : break;
+ case PIXELFORMAT_YUYV:
+ case PIXELFORMAT_UYVY:
+ case PIXELFORMAT_YUV420P:
+ case PIXELFORMAT_YUV422P:
+ {
+ uchar *yptr, *cbptr, *crptr;
+ bool halfheight=false;
+ bool packed=false;
+// Adjust the algorithm to the specific YUV data arrangement.
+ if (m_currentbuffer.pixelformat == PIXELFORMAT_YUV420P)
+ halfheight=true;
+ if (m_currentbuffer.pixelformat == PIXELFORMAT_YUYV)
+ {
+ yptr = &m_currentbuffer.data[0];
+ cbptr = yptr + 1;
+ crptr = yptr + 3;
+ packed=true;
+ }
+ else if (m_currentbuffer.pixelformat == PIXELFORMAT_UYVY)
+ {
+ cbptr = &m_currentbuffer.data[0];
+ yptr = cbptr + 1;
+ crptr = cbptr + 2;
+ packed=true;
+ }
+ else
+ {
+ yptr = &m_currentbuffer.data[0];
+ cbptr = yptr + (width()*height());
+ crptr = cbptr + (width()*height()/(halfheight ? 4:2));
+ }
+
+ for(int y=0; y<height(); y++)
+ {
+// Decode scanline
+ for(int x=0; x<width(); x++)
+ {
+ int c,d,e;
+
+ if (packed)
+ {
+ c = (yptr[x<<1])-16;
+ d = (cbptr[x>>1<<2])-128;
+ e = (crptr[x>>1<<2])-128;
+ }
+ else
+ {
+ c = (yptr[x])-16;
+ d = (cbptr[x>>1])-128;
+ e = (crptr[x>>1])-128;
+ }
+
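+// Fixed-point ITU-R BT.601 YCbCr -> RGB conversion: coefficients are scaled by 256, with +128 for rounding
+// (c = Y - 16, d = Cb - 128, e = Cr - 128 were computed above).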
+ int r = (298 * c + 409 * e + 128)>>8;
+ int g = (298 * c - 100 * d - 208 * e + 128)>>8;
+ int b = (298 * c + 516 * d + 128)>>8;
+
+ if (r<0) r=0; if (r>255) r=255;
+ if (g<0) g=0; if (g>255) g=255;
+ if (b<0) b=0; if (b>255) b=255;
+
+ uint *p = (uint*)qimage->scanLine(y)+x;
+ *p = qRgba(r,g,b,255);
+
+ }
+// Jump to next line
+ if (packed)
+ {
+ yptr+=width()*2;
+ cbptr+=width()*2;
+ crptr+=width()*2;
+ }
+ else
+ {
+ yptr+=width();
+ if (!halfheight || y&1)
+ {
+ cbptr+=width()/2;
+ crptr+=width()/2;
+ }
+ }
+ }
+ }
+ break;
+
+// Compressed formats
+ case PIXELFORMAT_JPEG : break;
+ case PIXELFORMAT_MPEG : break;
+
+// Reserved formats
+ case PIXELFORMAT_DV : break;
+ case PIXELFORMAT_ET61X251:break;
+ case PIXELFORMAT_HI240 : break;
+ case PIXELFORMAT_HM12 : break;
+ case PIXELFORMAT_MJPEG : break;
+ case PIXELFORMAT_PWC1 : break;
+ case PIXELFORMAT_PWC2 : break;
+ case PIXELFORMAT_SN9C10X:
+ {
+ unsigned char *s = new unsigned char [width() * height()];
+ unsigned char *d = new unsigned char [width() * height() * 3];
+ sonix_decompress_init();
+ sonix_decompress(width(), height(), &m_currentbuffer.data.first(), s);
+ bayer2rgb24(d, s, width(), height());
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = d[step+2];
+ bits[loop+1] = d[step+1];
+ bits[loop+2] = d[step];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ delete[] s;
+ delete[] d;
+ }
+ break;
+ case PIXELFORMAT_WNVA : break;
+ case PIXELFORMAT_YYUV : break;
+ }
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn VideoDevice::stopCapturing()
+ */
+int VideoDevice::stopCapturing()
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "called." << endl;
+ if(isOpen())
+ {
+ switch (m_io_method)
+ {
+ case IO_METHOD_NONE: // Card cannot capture frames
+ return EXIT_FAILURE;
+ break;
+ case IO_METHOD_READ: // Nothing to do
+ break;
+ case IO_METHOD_MMAP:
+ case IO_METHOD_USERPTR:
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ {
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl (VIDIOC_STREAMOFF, &type))
+ return errnoReturn ("VIDIOC_STREAMOFF");
+
+ if (m_io_method == IO_METHOD_MMAP)
+ {
+ unsigned int loop;
+ for (loop = 0; loop < m_streambuffers; ++loop)
+ {
+ if (munmap(m_rawbuffers[loop].start,m_rawbuffers[loop].length) != 0)
+ {
+ kdDebug(14010) << k_funcinfo << "unable to munmap." << endl;
+ }
+ }
+ }
+ }
+#endif
+ break;
+ }
+		kdDebug(14010) << k_funcinfo << "exited successfully." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+
+/*!
+ \fn VideoDevice::close()
+ */
+int VideoDevice::close()
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ if(isOpen())
+ {
+		kdDebug(14010) << k_funcinfo << " Device is open. Trying to properly shut down the device." << endl;
+ stopCapturing();
+ kdDebug(14010) << k_funcinfo << "::close() returns " << ::close(descriptor) << endl;
+ }
+ descriptor = -1;
+ return EXIT_SUCCESS;
+}
+
+float VideoDevice::getBrightness()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getBrightness();
+ else
+ return 0;
+}
+
+float VideoDevice::setBrightness(float brightness)
+{
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ m_input[m_current_input].setBrightness(brightness); // Just to check bounds
+
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ {
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ CLEAR (queryctrl);
+ queryctrl.id = V4L2_CID_BRIGHTNESS;
+
+ if (-1 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (errno != EINVAL)
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_QUERYCTRL failed (" << errno << ")." << endl;
+ } else
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Brightness control." << endl;
+ }
+ } else
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Brightness control." << endl;
+ } else
+ {
+ CLEAR (control);
+ control.id = V4L2_CID_BRIGHTNESS;
+				control.value = (__s32)(queryctrl.minimum + (queryctrl.maximum - queryctrl.minimum)*getBrightness()); // map the [0..1] fraction onto the control's range
+
+ if (-1 == xioctl (VIDIOC_S_CTRL, &control))
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_S_CTRL failed (" << errno << ")." << endl;
+ }
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+ V4L_picture.brightness = uint(65535*getBrightness());
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+			kdDebug(14010) << k_funcinfo << "Card does not seem to support adjusting the image brightness. A fallback is not yet implemented." << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return getBrightness();
+}
+
+float VideoDevice::getContrast()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getContrast();
+ else
+ return 0;
+}
+
+float VideoDevice::setContrast(float contrast)
+{
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ m_input[m_current_input].setContrast(contrast); // Just to check bounds
+
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ {
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ CLEAR (queryctrl);
+ queryctrl.id = V4L2_CID_CONTRAST;
+
+ if (-1 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (errno != EINVAL)
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_QUERYCTRL failed (" << errno << ")." << endl;
+ } else
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Contrast control." << endl;
+ }
+ } else
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Contrast control." << endl;
+ } else
+ {
+ CLEAR (control);
+ control.id = V4L2_CID_CONTRAST;
+				control.value = (__s32)(queryctrl.minimum + (queryctrl.maximum - queryctrl.minimum)*getContrast()); // map the [0..1] fraction onto the control's range
+
+ if (-1 == xioctl (VIDIOC_S_CTRL, &control))
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_S_CTRL failed (" << errno << ")." << endl;
+ }
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+ V4L_picture.contrast = uint(65535*getContrast());
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+			kdDebug(14010) << k_funcinfo << "Card does not seem to support adjusting the image contrast. A fallback is not yet implemented." << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return getContrast();
+}
+
+float VideoDevice::getSaturation()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getSaturation();
+ else
+ return 0;
+}
+
+float VideoDevice::setSaturation(float saturation)
+{
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ m_input[m_current_input].setSaturation(saturation); // Just to check bounds
+
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ {
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ CLEAR (queryctrl);
+ queryctrl.id = V4L2_CID_SATURATION;
+
+ if (-1 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (errno != EINVAL)
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_QUERYCTRL failed (" << errno << ")." << endl;
+ } else
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Saturation control." << endl;
+ }
+ } else
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Saturation control." << endl;
+ } else
+ {
+ CLEAR (control);
+ control.id = V4L2_CID_SATURATION;
+				control.value = (__s32)(queryctrl.minimum + (queryctrl.maximum - queryctrl.minimum)*getSaturation()); // map the [0..1] fraction onto the control's range
+
+ if (-1 == xioctl (VIDIOC_S_CTRL, &control))
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_S_CTRL failed (" << errno << ")." << endl;
+ }
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+ V4L_picture.colour = uint(65535*getSaturation());
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+			kdDebug(14010) << k_funcinfo << "Card does not seem to support adjusting the image saturation. A fallback is not yet implemented." << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return getSaturation();
+}
+
+float VideoDevice::getWhiteness()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getWhiteness();
+ else
+ return 0;
+}
+
+float VideoDevice::setWhiteness(float whiteness)
+{
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ m_input[m_current_input].setWhiteness(whiteness); // Just to check bounds
+
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ {
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ CLEAR (queryctrl);
+ queryctrl.id = V4L2_CID_WHITENESS;
+
+ if (-1 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (errno != EINVAL)
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_QUERYCTRL failed (" << errno << ")." << endl;
+ } else
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Whiteness control." << endl;
+ }
+ } else
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Whiteness control." << endl;
+ } else
+ {
+ CLEAR (control);
+ control.id = V4L2_CID_WHITENESS;
+				control.value = (__s32)(queryctrl.minimum + (queryctrl.maximum - queryctrl.minimum)*getWhiteness()); // map the [0..1] fraction onto the control's range
+
+ if (-1 == xioctl (VIDIOC_S_CTRL, &control))
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_S_CTRL failed (" << errno << ")." << endl;
+ }
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+ V4L_picture.whiteness = uint(65535*getWhiteness());
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+			kdDebug(14010) << k_funcinfo << "Card does not seem to support adjusting the white level. A fallback is not yet implemented." << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return getWhiteness();
+}
+
+float VideoDevice::getHue()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getHue();
+ else
+ return 0;
+}
+
+float VideoDevice::setHue(float hue)
+{
+ kdDebug(14010) << k_funcinfo << " called." << endl;
+ m_input[m_current_input].setHue(hue); // Just to check bounds
+
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ {
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ CLEAR (queryctrl);
+ queryctrl.id = V4L2_CID_HUE;
+
+ if (-1 == xioctl (VIDIOC_QUERYCTRL, &queryctrl))
+ {
+ if (errno != EINVAL)
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_QUERYCTRL failed (" << errno << ")." << endl;
+ } else
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Hue control." << endl;
+ }
+ } else
+ if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+ {
+ kdDebug(14010) << k_funcinfo << "Device doesn't support the Hue control." << endl;
+ } else
+ {
+ CLEAR (control);
+ control.id = V4L2_CID_HUE;
+ control.value = (__s32)((queryctrl.maximum - queryctrl.minimum)*getHue());
+
+ if (-1 == xioctl (VIDIOC_S_CTRL, &control))
+ {
+ kdDebug(14010) << k_funcinfo << "VIDIOC_S_CTRL failed (" << errno << ")." << endl;
+ }
+ }
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ {
+ struct video_picture V4L_picture;
+ if(-1 == xioctl(VIDIOCGPICT, &V4L_picture))
+ kdDebug(14010) << k_funcinfo << "VIDIOCGPICT failed (" << errno << ")." << endl;
+ V4L_picture.hue = uint(65535*getHue());
+ if(-1 == xioctl(VIDIOCSPICT,&V4L_picture))
+ kdDebug(14010) << k_funcinfo << "Card seems to not support adjusting image hue. Fallback to it is not yet implemented." << endl;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return getHue();
+}
+
+
+bool VideoDevice::getAutoBrightnessContrast()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getAutoBrightnessContrast();
+ else
+ return false;
+}
+
+bool VideoDevice::setAutoBrightnessContrast(bool brightnesscontrast)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevice::setAutoBrightnessContrast(" << brightnesscontrast << ") called." << endl;
+ if (m_current_input < m_input.size() )
+ {
+ m_input[m_current_input].setAutoBrightnessContrast(brightnesscontrast);
+ return m_input[m_current_input].getAutoBrightnessContrast();
+ }
+ else
+ return false;
+
+}
+
+bool VideoDevice::getAutoColorCorrection()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getAutoColorCorrection();
+ else
+ return false;
+}
+
+bool VideoDevice::setAutoColorCorrection(bool colorcorrection)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevice::setAutoColorCorrection(" << colorcorrection << ") called." << endl;
+ if (m_current_input < m_input.size() )
+ {
+ m_input[m_current_input].setAutoColorCorrection(colorcorrection);
+ return m_input[m_current_input].getAutoColorCorrection();
+ }
+ else
+ return false;
+}
+
+bool VideoDevice::getImageAsMirror()
+{
+ if (m_current_input < m_input.size() )
+ return m_input[m_current_input].getImageAsMirror();
+ else
+ return false;
+}
+
+bool VideoDevice::setImageAsMirror(bool imageasmirror)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevice::setImageAsMirror(" << imageasmirror << ") called." << endl;
+ if (m_current_input < m_input.size() )
+ {
+ m_input[m_current_input].setImageAsMirror(imageasmirror);
+ return m_input[m_current_input].getImageAsMirror();
+ }
+ else
+ return false;
+}
+
+pixel_format VideoDevice::pixelFormatForPalette( int palette )
+{
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ switch(palette)
+ {
+ case 0 : return PIXELFORMAT_NONE; break;
+
+// Packed RGB formats
+ case V4L2_PIX_FMT_RGB332 : return PIXELFORMAT_RGB332; break;
+#if defined( V4L2_PIX_FMT_RGB444 )
+ case V4L2_PIX_FMT_RGB444 : return PIXELFORMAT_RGB444; break;
+#endif
+ case V4L2_PIX_FMT_RGB555 : return PIXELFORMAT_RGB555; break;
+ case V4L2_PIX_FMT_RGB565 : return PIXELFORMAT_RGB565; break;
+ case V4L2_PIX_FMT_RGB555X : return PIXELFORMAT_RGB555X; break;
+ case V4L2_PIX_FMT_RGB565X : return PIXELFORMAT_RGB565X; break;
+ case V4L2_PIX_FMT_BGR24 : return PIXELFORMAT_BGR24; break;
+ case V4L2_PIX_FMT_RGB24 : return PIXELFORMAT_RGB24; break;
+ case V4L2_PIX_FMT_BGR32 : return PIXELFORMAT_BGR32; break;
+ case V4L2_PIX_FMT_RGB32 : return PIXELFORMAT_RGB32; break;
+
+// Bayer RGB format
+ case V4L2_PIX_FMT_SBGGR8 : return PIXELFORMAT_SBGGR8; break;
+
+// YUV formats
+ case V4L2_PIX_FMT_GREY : return PIXELFORMAT_GREY; break;
+ case V4L2_PIX_FMT_YUYV : return PIXELFORMAT_YUYV; break;
+ case V4L2_PIX_FMT_UYVY : return PIXELFORMAT_UYVY; break;
+ case V4L2_PIX_FMT_YUV420 : return PIXELFORMAT_YUV420P; break;
+ case V4L2_PIX_FMT_YUV422P : return PIXELFORMAT_YUV422P; break;
+
+// Compressed formats
+ case V4L2_PIX_FMT_JPEG : return PIXELFORMAT_JPEG; break;
+ case V4L2_PIX_FMT_MPEG : return PIXELFORMAT_MPEG; break;
+
+// Reserved formats
+ case V4L2_PIX_FMT_DV : return PIXELFORMAT_DV; break;
+ case V4L2_PIX_FMT_ET61X251 : return PIXELFORMAT_ET61X251; break;
+ case V4L2_PIX_FMT_HI240 : return PIXELFORMAT_HI240; break;
+#if defined( V4L2_PIX_FMT_HM12 )
+ case V4L2_PIX_FMT_HM12 : return PIXELFORMAT_HM12; break;
+#endif
+ case V4L2_PIX_FMT_MJPEG : return PIXELFORMAT_MJPEG; break;
+ case V4L2_PIX_FMT_PWC1 : return PIXELFORMAT_PWC1; break;
+ case V4L2_PIX_FMT_PWC2 : return PIXELFORMAT_PWC2; break;
+ case V4L2_PIX_FMT_SN9C10X : return PIXELFORMAT_SN9C10X; break;
+ case V4L2_PIX_FMT_WNVA : return PIXELFORMAT_WNVA; break;
+ case V4L2_PIX_FMT_YYUV : return PIXELFORMAT_YYUV; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ switch(palette)
+ {
+ case 0 : return PIXELFORMAT_NONE; break;
+ case VIDEO_PALETTE_GREY : return PIXELFORMAT_GREY; break;
+ case VIDEO_PALETTE_HI240 : return PIXELFORMAT_RGB332; break;
+ case VIDEO_PALETTE_RGB555 : return PIXELFORMAT_RGB555; break;
+ case VIDEO_PALETTE_RGB565 : return PIXELFORMAT_RGB565; break;
+ case VIDEO_PALETTE_RGB24 : return PIXELFORMAT_RGB24; break;
+ case VIDEO_PALETTE_RGB32 : return PIXELFORMAT_RGB32; break;
+ case VIDEO_PALETTE_YUYV : return PIXELFORMAT_YUYV; break;
+ case VIDEO_PALETTE_UYVY : return PIXELFORMAT_UYVY; break;
+ case VIDEO_PALETTE_YUV420 :
+ case VIDEO_PALETTE_YUV420P : return PIXELFORMAT_YUV420P; break;
+ case VIDEO_PALETTE_YUV422P : return PIXELFORMAT_YUV422P; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ return PIXELFORMAT_NONE; break;
+ }
+ return PIXELFORMAT_NONE;
+}
+
+int VideoDevice::pixelFormatCode(pixel_format pixelformat)
+{
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ switch(pixelformat)
+ {
+ case PIXELFORMAT_NONE : return 0; break;
+
+// Packed RGB formats
+ case PIXELFORMAT_RGB332 : return V4L2_PIX_FMT_RGB332; break;
+#if defined( V4L2_PIX_FMT_RGB444 )
+ case PIXELFORMAT_RGB444 : return V4L2_PIX_FMT_RGB444; break;
+#endif
+ case PIXELFORMAT_RGB555 : return V4L2_PIX_FMT_RGB555; break;
+ case PIXELFORMAT_RGB565 : return V4L2_PIX_FMT_RGB565; break;
+ case PIXELFORMAT_RGB555X: return V4L2_PIX_FMT_RGB555X; break;
+ case PIXELFORMAT_RGB565X: return V4L2_PIX_FMT_RGB565X; break;
+ case PIXELFORMAT_BGR24 : return V4L2_PIX_FMT_BGR24; break;
+ case PIXELFORMAT_RGB24 : return V4L2_PIX_FMT_RGB24; break;
+ case PIXELFORMAT_BGR32 : return V4L2_PIX_FMT_BGR32; break;
+ case PIXELFORMAT_RGB32 : return V4L2_PIX_FMT_RGB32; break;
+
+// Bayer RGB format
+ case PIXELFORMAT_SBGGR8 : return V4L2_PIX_FMT_SBGGR8; break;
+
+// YUV formats
+ case PIXELFORMAT_GREY : return V4L2_PIX_FMT_GREY; break;
+ case PIXELFORMAT_YUYV : return V4L2_PIX_FMT_YUYV; break;
+ case PIXELFORMAT_UYVY : return V4L2_PIX_FMT_UYVY; break;
+ case PIXELFORMAT_YUV420P: return V4L2_PIX_FMT_YUV420; break;
+ case PIXELFORMAT_YUV422P: return V4L2_PIX_FMT_YUV422P; break;
+
+// Compressed formats
+ case PIXELFORMAT_JPEG : return V4L2_PIX_FMT_JPEG; break;
+ case PIXELFORMAT_MPEG : return V4L2_PIX_FMT_MPEG; break;
+
+// Reserved formats
+ case PIXELFORMAT_DV : return V4L2_PIX_FMT_DV; break;
+ case PIXELFORMAT_ET61X251:return V4L2_PIX_FMT_ET61X251;break;
+ case PIXELFORMAT_HI240 : return V4L2_PIX_FMT_HI240; break;
+#if defined( V4L2_PIX_FMT_HM12 )
+ case PIXELFORMAT_HM12 : return V4L2_PIX_FMT_HM12; break;
+#endif
+ case PIXELFORMAT_MJPEG : return V4L2_PIX_FMT_MJPEG; break;
+ case PIXELFORMAT_PWC1 : return V4L2_PIX_FMT_PWC1; break;
+ case PIXELFORMAT_PWC2 : return V4L2_PIX_FMT_PWC2; break;
+ case PIXELFORMAT_SN9C10X: return V4L2_PIX_FMT_SN9C10X; break;
+ case PIXELFORMAT_WNVA : return V4L2_PIX_FMT_WNVA; break;
+ case PIXELFORMAT_YYUV : return V4L2_PIX_FMT_YYUV; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ switch(pixelformat)
+ {
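+ 			// V4L1 has no palette for several of these formats; returning 0 means the format cannot be expressed under V4L.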
+ case PIXELFORMAT_NONE : return 0; break;
+
+// Packed RGB formats
+ case PIXELFORMAT_RGB332 : return VIDEO_PALETTE_HI240; break;
+ case PIXELFORMAT_RGB444 : return 0; break;
+ case PIXELFORMAT_RGB555 : return VIDEO_PALETTE_RGB555; break;
+ case PIXELFORMAT_RGB565 : return VIDEO_PALETTE_RGB565; break;
+ case PIXELFORMAT_RGB555X: return 0; break;
+ case PIXELFORMAT_RGB565X: return 0; break;
+ case PIXELFORMAT_BGR24 : return 0; break;
+ case PIXELFORMAT_RGB24 : return VIDEO_PALETTE_RGB24; break;
+ case PIXELFORMAT_BGR32 : return 0; break;
+ case PIXELFORMAT_RGB32 : return VIDEO_PALETTE_RGB32; break;
+
+// Bayer RGB format
+ case PIXELFORMAT_SBGGR8 : return 0; break;
+
+// YUV formats
+ case PIXELFORMAT_GREY : return VIDEO_PALETTE_GREY; break;
+ case PIXELFORMAT_YUYV : return VIDEO_PALETTE_YUYV; break;
+ case PIXELFORMAT_UYVY : return VIDEO_PALETTE_UYVY; break;
+ case PIXELFORMAT_YUV420P: return VIDEO_PALETTE_YUV420; break;
+ case PIXELFORMAT_YUV422P: return VIDEO_PALETTE_YUV422P; break;
+
+// Compressed formats
+ case PIXELFORMAT_JPEG : return 0; break;
+ case PIXELFORMAT_MPEG : return 0; break;
+
+// Reserved formats
+ case PIXELFORMAT_DV : return 0; break;
+ case PIXELFORMAT_ET61X251:return 0; break;
+ case PIXELFORMAT_HI240 : return VIDEO_PALETTE_HI240; break;
+ case PIXELFORMAT_HM12 : return 0; break;
+ case PIXELFORMAT_MJPEG : return 0; break;
+ case PIXELFORMAT_PWC1 : return 0; break;
+ case PIXELFORMAT_PWC2 : return 0; break;
+ case PIXELFORMAT_SN9C10X: return 0; break;
+ case PIXELFORMAT_WNVA : return 0; break;
+ case PIXELFORMAT_YYUV : return 0; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ return PIXELFORMAT_NONE; break;
+ }
+ return PIXELFORMAT_NONE;
+}
+
+int VideoDevice::pixelFormatDepth(pixel_format pixelformat)
+{
+ switch(pixelformat)
+ {
+ case PIXELFORMAT_NONE : return 0; break;
+
+// Packed RGB formats
+ case PIXELFORMAT_RGB332 : return 8; break;
+ case PIXELFORMAT_RGB444 : return 16; break;
+ case PIXELFORMAT_RGB555 : return 16; break;
+ case PIXELFORMAT_RGB565 : return 16; break;
+ case PIXELFORMAT_RGB555X: return 16; break;
+ case PIXELFORMAT_RGB565X: return 16; break;
+ case PIXELFORMAT_BGR24 : return 24; break;
+ case PIXELFORMAT_RGB24 : return 24; break;
+ case PIXELFORMAT_BGR32 : return 32; break;
+ case PIXELFORMAT_RGB32 : return 32; break;
+
+// Bayer RGB format
+ case PIXELFORMAT_SBGGR8 : return 0; break;
+
+// YUV formats
+ case PIXELFORMAT_GREY : return 8; break;
+ case PIXELFORMAT_YUYV : return 16; break;
+ case PIXELFORMAT_UYVY : return 16; break;
+ case PIXELFORMAT_YUV420P: return 16; break;
+ case PIXELFORMAT_YUV422P: return 16; break;
+
+// Compressed formats
+ case PIXELFORMAT_JPEG : return 0; break;
+ case PIXELFORMAT_MPEG : return 0; break;
+
+// Reserved formats
+ case PIXELFORMAT_DV : return 0; break;
+ case PIXELFORMAT_ET61X251:return 0; break;
+ case PIXELFORMAT_HI240 : return 8; break;
+ case PIXELFORMAT_HM12 : return 0; break;
+ case PIXELFORMAT_MJPEG : return 0; break;
+ case PIXELFORMAT_PWC1 : return 0; break;
+ case PIXELFORMAT_PWC2 : return 0; break;
+ case PIXELFORMAT_SN9C10X: return 0; break;
+ case PIXELFORMAT_WNVA : return 0; break;
+ case PIXELFORMAT_YYUV : return 0; break;
+ }
+ return 0;
+}
+
+QString VideoDevice::pixelFormatName(pixel_format pixelformat)
+{
+ QString returnvalue;
+ returnvalue = "None";
+ switch(pixelformat)
+ {
+ case PIXELFORMAT_NONE : returnvalue = "None"; break;
+
+// Packed RGB formats
+ case PIXELFORMAT_RGB332 : returnvalue = "8-bit RGB332"; break;
+ 		case PIXELFORMAT_RGB444 : returnvalue = "16-bit RGB444";	break;
+ case PIXELFORMAT_RGB555 : returnvalue = "16-bit RGB555"; break;
+ case PIXELFORMAT_RGB565 : returnvalue = "16-bit RGB565"; break;
+ case PIXELFORMAT_RGB555X: returnvalue = "16-bit RGB555X"; break;
+ case PIXELFORMAT_RGB565X: returnvalue = "16-bit RGB565X"; break;
+ case PIXELFORMAT_BGR24 : returnvalue = "24-bit BGR24"; break;
+ case PIXELFORMAT_RGB24 : returnvalue = "24-bit RGB24"; break;
+ case PIXELFORMAT_BGR32 : returnvalue = "32-bit BGR32"; break;
+ case PIXELFORMAT_RGB32 : returnvalue = "32-bit RGB32"; break;
+
+// Bayer RGB format
+ case PIXELFORMAT_SBGGR8 : returnvalue = "Bayer RGB format"; break;
+
+// YUV formats
+ case PIXELFORMAT_GREY : returnvalue = "8-bit Grayscale"; break;
+ 		case PIXELFORMAT_YUYV	: returnvalue = "Packed YUV 4:2:2 (YUYV)";	break;
+ 		case PIXELFORMAT_UYVY	: returnvalue = "Packed YUV 4:2:2 (UYVY)";	break;
+ case PIXELFORMAT_YUV420P: returnvalue = "Planar YUV 4:2:0"; break;
+ case PIXELFORMAT_YUV422P: returnvalue = "Planar YUV 4:2:2"; break;
+
+
+// Compressed formats
+ case PIXELFORMAT_JPEG : returnvalue = "JPEG image"; break;
+ case PIXELFORMAT_MPEG : returnvalue = "MPEG stream"; break;
+
+// Reserved formats
+ case PIXELFORMAT_DV : returnvalue = "DV (unknown)"; break;
+ case PIXELFORMAT_ET61X251:returnvalue = "ET61X251"; break;
+ case PIXELFORMAT_HI240 : returnvalue = "8-bit HI240 (RGB332)"; break;
+ 		case PIXELFORMAT_HM12	: returnvalue = "HM12 (YUV 4:2:0 macroblock)";	break;
+ 		case PIXELFORMAT_MJPEG	: returnvalue = "Motion JPEG";	break;
+ case PIXELFORMAT_PWC1 : returnvalue = "PWC1"; break;
+ case PIXELFORMAT_PWC2 : returnvalue = "PWC2"; break;
+ case PIXELFORMAT_SN9C10X: returnvalue = "SN9C102"; break;
+ case PIXELFORMAT_WNVA : returnvalue = "Winnov Videum"; break;
+ case PIXELFORMAT_YYUV : returnvalue = "YYUV (unknown)"; break;
+ }
+ return returnvalue;
+}
+
+QString VideoDevice::pixelFormatName(int pixelformat)
+{
+ QString returnvalue;
+ returnvalue = "None";
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ switch(pixelformat)
+ {
+ case 0 : returnvalue = pixelFormatName(PIXELFORMAT_NONE); break;
+
+// Packed RGB formats
+ case V4L2_PIX_FMT_RGB332 : returnvalue = pixelFormatName(PIXELFORMAT_RGB332); break;
+#if defined( V4L2_PIX_FMT_RGB444 )
+ case V4L2_PIX_FMT_RGB444 : returnvalue = pixelFormatName(PIXELFORMAT_RGB444); break;
+#endif
+ case V4L2_PIX_FMT_RGB555 : returnvalue = pixelFormatName(PIXELFORMAT_RGB555); break;
+ case V4L2_PIX_FMT_RGB565 : returnvalue = pixelFormatName(PIXELFORMAT_RGB565); break;
+ case V4L2_PIX_FMT_RGB555X : returnvalue = pixelFormatName(PIXELFORMAT_RGB555X); break;
+ case V4L2_PIX_FMT_RGB565X : returnvalue = pixelFormatName(PIXELFORMAT_RGB565X); break;
+ case V4L2_PIX_FMT_BGR24 : returnvalue = pixelFormatName(PIXELFORMAT_BGR24); break;
+ case V4L2_PIX_FMT_RGB24 : returnvalue = pixelFormatName(PIXELFORMAT_RGB24); break;
+ case V4L2_PIX_FMT_BGR32 : returnvalue = pixelFormatName(PIXELFORMAT_BGR32); break;
+ case V4L2_PIX_FMT_RGB32 : returnvalue = pixelFormatName(PIXELFORMAT_RGB32); break;
+
+// Bayer RGB format
+ case V4L2_PIX_FMT_SBGGR8 : returnvalue = pixelFormatName(PIXELFORMAT_SBGGR8); break;
+
+// YUV formats
+ case V4L2_PIX_FMT_GREY : returnvalue = pixelFormatName(PIXELFORMAT_GREY); break;
+ case V4L2_PIX_FMT_YUYV : returnvalue = pixelFormatName(PIXELFORMAT_YUYV); break;
+ case V4L2_PIX_FMT_UYVY : returnvalue = pixelFormatName(PIXELFORMAT_UYVY); break;
+ case V4L2_PIX_FMT_YUV420 : returnvalue = pixelFormatName(PIXELFORMAT_YUV420P); break;
+ case V4L2_PIX_FMT_YUV422P : returnvalue = pixelFormatName(PIXELFORMAT_YUV422P); break;
+
+// Compressed formats
+ case V4L2_PIX_FMT_JPEG : returnvalue = pixelFormatName(PIXELFORMAT_JPEG); break;
+ case V4L2_PIX_FMT_MPEG : returnvalue = pixelFormatName(PIXELFORMAT_MPEG); break;
+
+// Reserved formats
+ case V4L2_PIX_FMT_DV : returnvalue = pixelFormatName(PIXELFORMAT_DV); break;
+ case V4L2_PIX_FMT_ET61X251 : returnvalue = pixelFormatName(PIXELFORMAT_ET61X251); break;
+ case V4L2_PIX_FMT_HI240 : returnvalue = pixelFormatName(PIXELFORMAT_HI240); break;
+#if defined( V4L2_PIX_FMT_HM12 )
+ case V4L2_PIX_FMT_HM12 : returnvalue = pixelFormatName(PIXELFORMAT_HM12); break;
+#endif
+ case V4L2_PIX_FMT_MJPEG : returnvalue = pixelFormatName(PIXELFORMAT_MJPEG); break;
+ case V4L2_PIX_FMT_PWC1 : returnvalue = pixelFormatName(PIXELFORMAT_PWC1); break;
+ case V4L2_PIX_FMT_PWC2 : returnvalue = pixelFormatName(PIXELFORMAT_PWC2); break;
+ case V4L2_PIX_FMT_SN9C10X : returnvalue = pixelFormatName(PIXELFORMAT_SN9C10X); break;
+ case V4L2_PIX_FMT_WNVA : returnvalue = pixelFormatName(PIXELFORMAT_WNVA); break;
+ case V4L2_PIX_FMT_YYUV : returnvalue = pixelFormatName(PIXELFORMAT_YYUV); break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ switch(pixelformat)
+ {
+ case VIDEO_PALETTE_GREY : returnvalue = pixelFormatName(PIXELFORMAT_GREY); break;
+ case VIDEO_PALETTE_HI240 : returnvalue = pixelFormatName(PIXELFORMAT_RGB332); break;
+ case VIDEO_PALETTE_RGB555 : returnvalue = pixelFormatName(PIXELFORMAT_RGB555); break;
+ case VIDEO_PALETTE_RGB565 : returnvalue = pixelFormatName(PIXELFORMAT_RGB565); break;
+ case VIDEO_PALETTE_RGB24 : returnvalue = pixelFormatName(PIXELFORMAT_RGB24); break;
+ case VIDEO_PALETTE_RGB32 : returnvalue = pixelFormatName(PIXELFORMAT_RGB32); break;
+ case VIDEO_PALETTE_YUYV : returnvalue = pixelFormatName(PIXELFORMAT_YUYV); break;
+ case VIDEO_PALETTE_UYVY : returnvalue = pixelFormatName(PIXELFORMAT_UYVY); break;
+ case VIDEO_PALETTE_YUV420 :
+ case VIDEO_PALETTE_YUV420P : returnvalue = pixelFormatName(PIXELFORMAT_YUV420P); break;
+ case VIDEO_PALETTE_YUV422P : returnvalue = pixelFormatName(PIXELFORMAT_YUV422P); break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return returnvalue;
+}
+
+int VideoDevice::detectPixelFormats()
+{
+ int err = 0;
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ fmtdesc.index = 0;
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ while ( err == 0 )
+ {
+ if (-1 == xioctl (VIDIOC_ENUM_FMT, &fmtdesc))
+// if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) < 0 )
+ {
+ perror("VIDIOC_ENUM_FMT");
+ err = errno;
+ }
+ else
+ {
+ kdDebug(14010) << k_funcinfo << fmtdesc.pixelformat << " " << pixelFormatName(fmtdesc.pixelformat) << endl; // Need a cleanup. PixelFormatForPalette is a really bad name
+ fmtdesc.index++;
+ }
+ }
+// break;
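+// The missing break appears intentional: after enumerating formats via VIDIOC_ENUM_FMT, execution falls through to the V4L-style probing below.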
+#endif
+ case VIDEODEV_DRIVER_V4L:
+// TODO: This probing can be used to detect which pixel formats are supported in an API-independent way, but V4L2 already provides VIDIOC_ENUM_FMT for this.
+// The correct thing to do is to isolate these calls and provide separate implementations for V4L and V4L2 once this code is migrated to a plugin architecture.
+
+// Packed RGB formats
+ kdDebug(14010) << k_funcinfo << "Supported pixel formats:" << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB332)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB332) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB444)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB444) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB555)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB555) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB565)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB565) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB555X)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB555X) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB565X)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB565X) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_BGR24)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_BGR24) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB24)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB24) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_BGR32)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_BGR32) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_RGB32)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_RGB32) << endl;
+
+// Bayer RGB format
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_SBGGR8)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_SBGGR8) << endl;
+
+// YUV formats
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_GREY)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_GREY) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_YUYV)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_YUYV) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_UYVY)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_UYVY) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_YUV420P)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_YUV420P) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_YUV422P)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_YUV422P) << endl;
+
+// Compressed formats
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_JPEG)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_JPEG) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_MPEG)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_MPEG) << endl;
+
+// Reserved formats
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_DV)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_DV) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_ET61X251)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_ET61X251) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_HI240)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_HI240) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_HM12)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_HM12) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_MJPEG)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_MJPEG) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_PWC1)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_PWC1) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_PWC2)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_PWC2) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_SN9C10X)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_SN9C10X) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_WNVA)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_WNVA) << endl;
+ if(PIXELFORMAT_NONE != setPixelFormat(PIXELFORMAT_YYUV)) kdDebug(14010) << k_funcinfo << pixelFormatName(PIXELFORMAT_YYUV) << endl;
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ return PIXELFORMAT_NONE; break;
+ }
+ return PIXELFORMAT_NONE;
+}
+
+__u64 VideoDevice::signalStandardCode(signal_standard standard)
+{
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ switch(standard)
+ {
+ case STANDARD_NONE : return V4L2_STD_UNKNOWN; break;
+ case STANDARD_PAL_B : return V4L2_STD_PAL_B; break;
+ case STANDARD_PAL_B1 : return V4L2_STD_PAL_B1; break;
+ case STANDARD_PAL_G : return V4L2_STD_PAL_G; break;
+ case STANDARD_PAL_H : return V4L2_STD_PAL_H; break;
+ case STANDARD_PAL_I : return V4L2_STD_PAL_I; break;
+ case STANDARD_PAL_D : return V4L2_STD_PAL_D; break;
+ case STANDARD_PAL_D1 : return V4L2_STD_PAL_D1; break;
+ case STANDARD_PAL_K : return V4L2_STD_PAL_K; break;
+ case STANDARD_PAL_M : return V4L2_STD_PAL_M; break;
+ case STANDARD_PAL_N : return V4L2_STD_PAL_N; break;
+ case STANDARD_PAL_Nc : return V4L2_STD_PAL_Nc; break;
+ case STANDARD_PAL_60 : return V4L2_STD_PAL_60; break;
+ case STANDARD_NTSC_M : return V4L2_STD_NTSC_M; break;
+ case STANDARD_NTSC_M_JP : return V4L2_STD_NTSC_M_JP; break;
+ case STANDARD_NTSC_443 : return V4L2_STD_NTSC; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_SECAM_B : return V4L2_STD_SECAM_B; break;
+ case STANDARD_SECAM_D : return V4L2_STD_SECAM_D; break;
+ case STANDARD_SECAM_G : return V4L2_STD_SECAM_G; break;
+ case STANDARD_SECAM_H : return V4L2_STD_SECAM_H; break;
+ case STANDARD_SECAM_K : return V4L2_STD_SECAM_K; break;
+ case STANDARD_SECAM_K1 : return V4L2_STD_SECAM_K1; break;
+ case STANDARD_SECAM_L : return V4L2_STD_SECAM_L; break;
+ case STANDARD_SECAM_LC : return V4L2_STD_SECAM; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_ATSC_8_VSB : return V4L2_STD_ATSC_8_VSB; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_ATSC_16_VSB : return V4L2_STD_ATSC_16_VSB; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_PAL_BG : return V4L2_STD_PAL_BG; break;
+ case STANDARD_PAL_DK : return V4L2_STD_PAL_DK; break;
+ case STANDARD_PAL : return V4L2_STD_PAL; break;
+ case STANDARD_NTSC : return V4L2_STD_NTSC; break;
+ case STANDARD_SECAM_DK : return V4L2_STD_SECAM_DK; break;
+ case STANDARD_SECAM : return V4L2_STD_SECAM; break;
+ case STANDARD_525_60 : return V4L2_STD_525_60; break;
+ case STANDARD_625_50 : return V4L2_STD_625_50; break;
+ case STANDARD_ALL : return V4L2_STD_ALL; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ switch(standard)
+ {
+ case STANDARD_NONE : return VIDEO_MODE_AUTO; break;
+ case STANDARD_PAL_B : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_B1 : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_G : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_H : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_I : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_D : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_D1 : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_K : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_M : return 5; break; // Undocumented value found to be compatible with V4L bttv driver
+ case STANDARD_PAL_N : return 6; break; // Undocumented value found to be compatible with V4L bttv driver
+ case STANDARD_PAL_Nc : return 4; break; // Undocumented value found to be compatible with V4L bttv driver
+ case STANDARD_PAL_60 : return VIDEO_MODE_PAL; break;
+ case STANDARD_NTSC_M : return VIDEO_MODE_NTSC; break;
+ case STANDARD_NTSC_M_JP : return 7; break; // Undocumented value found to be compatible with V4L bttv driver
+ case STANDARD_NTSC_443 : return VIDEO_MODE_NTSC; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_SECAM_B : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_D : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_G : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_H : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_K : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_K1 : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_L : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM_LC : return VIDEO_MODE_SECAM; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_ATSC_8_VSB : return VIDEO_MODE_AUTO; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_ATSC_16_VSB : return VIDEO_MODE_AUTO; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_PAL_BG : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL_DK : return VIDEO_MODE_PAL; break;
+ case STANDARD_PAL : return VIDEO_MODE_PAL; break;
+ case STANDARD_NTSC : return VIDEO_MODE_NTSC; break;
+ case STANDARD_SECAM_DK : return VIDEO_MODE_SECAM; break;
+ case STANDARD_SECAM : return VIDEO_MODE_SECAM; break;
+ case STANDARD_525_60 : return VIDEO_MODE_PAL; break;
+ case STANDARD_625_50 : return VIDEO_MODE_SECAM; break;
+ case STANDARD_ALL : return VIDEO_MODE_AUTO; break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ return STANDARD_NONE; break;
+ }
+ return STANDARD_NONE;
+}
+
+QString VideoDevice::signalStandardName(signal_standard standard)
+{
+ QString returnvalue;
+ returnvalue = "None";
+ switch(standard)
+ {
+ case STANDARD_NONE : returnvalue = "None"; break;
+ case STANDARD_PAL_B : returnvalue = "PAL-B"; break;
+ case STANDARD_PAL_B1 : returnvalue = "PAL-B1"; break;
+ case STANDARD_PAL_G : returnvalue = "PAL-G"; break;
+ case STANDARD_PAL_H : returnvalue = "PAL-H"; break;
+ case STANDARD_PAL_I : returnvalue = "PAL-I"; break;
+ case STANDARD_PAL_D : returnvalue = "PAL-D"; break;
+ case STANDARD_PAL_D1 : returnvalue = "PAL-D1"; break;
+ case STANDARD_PAL_K : returnvalue = "PAL-K"; break;
+ case STANDARD_PAL_M : returnvalue = "PAL-M"; break;
+ case STANDARD_PAL_N : returnvalue = "PAL-N"; break;
+ case STANDARD_PAL_Nc : returnvalue = "PAL-Nc"; break;
+ case STANDARD_PAL_60 : returnvalue = "PAL-60"; break;
+ case STANDARD_NTSC_M : returnvalue = "NTSC-M"; break;
+ case STANDARD_NTSC_M_JP : returnvalue = "NTSC-M(JP)"; break;
+ case STANDARD_NTSC_443 : returnvalue = "NTSC-443"; break;
+ case STANDARD_SECAM_B : returnvalue = "SECAM-B"; break;
+ case STANDARD_SECAM_D : returnvalue = "SECAM-D"; break;
+ case STANDARD_SECAM_G : returnvalue = "SECAM-G"; break;
+ case STANDARD_SECAM_H : returnvalue = "SECAM-H"; break;
+ case STANDARD_SECAM_K : returnvalue = "SECAM-K"; break;
+ case STANDARD_SECAM_K1 : returnvalue = "SECAM-K1"; break;
+ case STANDARD_SECAM_L : returnvalue = "SECAM-L"; break;
+ case STANDARD_SECAM_LC : returnvalue = "SECAM-LC"; break;
+ case STANDARD_ATSC_8_VSB : returnvalue = "ATSC-8-VSB"; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_ATSC_16_VSB : returnvalue = "ATSC-16-VSB"; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_PAL_BG : returnvalue = "PAL-BG"; break;
+ case STANDARD_PAL_DK : returnvalue = "PAL-DK"; break;
+ case STANDARD_PAL : returnvalue = "PAL"; break;
+ case STANDARD_NTSC : returnvalue = "NTSC"; break;
+ case STANDARD_SECAM_DK : returnvalue = "SECAM-DK"; break;
+ case STANDARD_SECAM : returnvalue = "SECAM"; break;
+ case STANDARD_525_60 : returnvalue = "525 lines 60Hz"; break;
+ case STANDARD_625_50 : returnvalue = "625 lines 50Hz"; break;
+ case STANDARD_ALL : returnvalue = "All"; break;
+ }
+ return returnvalue;
+}
+
+QString VideoDevice::signalStandardName(int standard)
+{
+ QString returnvalue;
+ returnvalue = "None";
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ switch(standard)
+ {
+ case V4L2_STD_PAL_B : returnvalue = signalStandardName(STANDARD_PAL_B); break;
+ case V4L2_STD_PAL_B1 : returnvalue = signalStandardName(STANDARD_PAL_B1); break;
+ case V4L2_STD_PAL_G : returnvalue = signalStandardName(STANDARD_PAL_G); break;
+ case V4L2_STD_PAL_H : returnvalue = signalStandardName(STANDARD_PAL_H); break;
+ case V4L2_STD_PAL_I : returnvalue = signalStandardName(STANDARD_PAL_I); break;
+ case V4L2_STD_PAL_D : returnvalue = signalStandardName(STANDARD_PAL_D); break;
+ case V4L2_STD_PAL_D1 : returnvalue = signalStandardName(STANDARD_PAL_D1); break;
+ case V4L2_STD_PAL_K : returnvalue = signalStandardName(STANDARD_PAL_K); break;
+ case V4L2_STD_PAL_M : returnvalue = signalStandardName(STANDARD_PAL_M); break;
+ case V4L2_STD_PAL_N : returnvalue = signalStandardName(STANDARD_PAL_N); break;
+ case V4L2_STD_PAL_Nc : returnvalue = signalStandardName(STANDARD_PAL_Nc); break;
+ case V4L2_STD_PAL_60 : returnvalue = signalStandardName(STANDARD_PAL_60); break;
+ case V4L2_STD_NTSC_M : returnvalue = signalStandardName(STANDARD_NTSC_M); break;
+ case V4L2_STD_NTSC_M_JP : returnvalue = signalStandardName(STANDARD_NTSC_M_JP); break;
+// case V4L2_STD_NTSC_443 : returnvalue = signalStandardName(STANDARD_NTSC_443); break; // Commented out because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case V4L2_STD_SECAM_B : returnvalue = signalStandardName(STANDARD_SECAM_B); break;
+ case V4L2_STD_SECAM_D : returnvalue = signalStandardName(STANDARD_SECAM_D); break;
+ case V4L2_STD_SECAM_G : returnvalue = signalStandardName(STANDARD_SECAM_G); break;
+ case V4L2_STD_SECAM_H : returnvalue = signalStandardName(STANDARD_SECAM_H); break;
+ case V4L2_STD_SECAM_K : returnvalue = signalStandardName(STANDARD_SECAM_K); break;
+ case V4L2_STD_SECAM_K1 : returnvalue = signalStandardName(STANDARD_SECAM_K1); break;
+ case V4L2_STD_SECAM_L : returnvalue = signalStandardName(STANDARD_SECAM_L); break;
+// case V4L2_STD_SECAM_LC : returnvalue = signalStandardName(STANDARD_SECAM_LC); break; // Commented out because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case V4L2_STD_ATSC_8_VSB : returnvalue = signalStandardName(STANDARD_ATSC_8_VSB); break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case V4L2_STD_ATSC_16_VSB : returnvalue = signalStandardName(STANDARD_ATSC_16_VSB); break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case V4L2_STD_PAL_BG : returnvalue = signalStandardName(STANDARD_PAL_BG); break;
+ case V4L2_STD_PAL_DK : returnvalue = signalStandardName(STANDARD_PAL_DK); break;
+ case V4L2_STD_PAL : returnvalue = signalStandardName(STANDARD_PAL); break;
+ case V4L2_STD_NTSC : returnvalue = signalStandardName(STANDARD_NTSC); break;
+ case V4L2_STD_SECAM_DK : returnvalue = signalStandardName(STANDARD_SECAM_DK); break;
+ case V4L2_STD_SECAM : returnvalue = signalStandardName(STANDARD_SECAM); break;
+ case V4L2_STD_525_60 : returnvalue = signalStandardName(STANDARD_525_60); break;
+ case V4L2_STD_625_50 : returnvalue = signalStandardName(STANDARD_625_50); break;
+ case V4L2_STD_ALL : returnvalue = signalStandardName(STANDARD_ALL); break;
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ switch(standard)
+ {
+ case VIDEO_MODE_PAL : returnvalue = signalStandardName(STANDARD_PAL); break;
+ case VIDEO_MODE_NTSC : returnvalue = signalStandardName(STANDARD_NTSC); break;
+ case VIDEO_MODE_SECAM : returnvalue = signalStandardName(STANDARD_SECAM); break;
+ case VIDEO_MODE_AUTO : returnvalue = signalStandardName(STANDARD_ALL); break; // It must be disabled until I find a correct way to handle those non-standard bttv modes
+// case VIDEO_MODE_PAL_Nc : returnvalue = signalStandardName(STANDARD_PAL_Nc); break; // Undocumented value found to be compatible with V4L bttv driver
+ case VIDEO_MODE_PAL_M : returnvalue = signalStandardName(STANDARD_PAL_M); break; // Undocumented value found to be compatible with V4L bttv driver
+ case VIDEO_MODE_PAL_N : returnvalue = signalStandardName(STANDARD_PAL_N); break; // Undocumented value found to be compatible with V4L bttv driver
+ case VIDEO_MODE_NTSC_JP : returnvalue = signalStandardName(STANDARD_NTSC_M_JP); break; // Undocumented value found to be compatible with V4L bttv driver
+ }
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ return returnvalue;
+}
+
+/*!
+ \fn VideoDevice::detectSignalStandards()
+ */
+int VideoDevice::detectSignalStandards()
+{
+ switch(m_driver)
+ {
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ case VIDEODEV_DRIVER_V4L2:
+ break;
+#endif
+ case VIDEODEV_DRIVER_V4L:
+ break;
+#endif
+ case VIDEODEV_DRIVER_NONE:
+ default:
+ break;
+ }
+ //FIXME: return a real value
+ return 0;
+}
+
+/*!
+ \fn VideoDevice::initRead()
+ */
+int VideoDevice::initRead()
+{
+ /// @todo implement me
+
+ kdDebug(14010) << k_funcinfo << "called." << endl;
+ if(isOpen())
+ {
+ m_rawbuffers.resize(1);
+ if (m_rawbuffers.size()==0)
+ {
+ fprintf (stderr, "Out of memory\n");
+ return EXIT_FAILURE;
+ }
+ kdDebug(14010) << k_funcinfo << "m_buffer_size: " << m_buffer_size << endl;
+
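+ 		// Read I/O uses a single user-space buffer of m_buffer_size bytes for incoming frames.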
+// m_rawbuffers[0].pixelformat=m_pixelformat;
+ m_rawbuffers[0].length = m_buffer_size;
+ m_rawbuffers[0].start = (uchar *)malloc (m_buffer_size);
+
+ if (!m_rawbuffers[0].start)
+ {
+ fprintf (stderr, "Out of memory\n");
+ return EXIT_FAILURE;
+ }
+ kdDebug(14010) << k_funcinfo << "exited successfuly." << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+
+/*!
+ \fn VideoDevice::initMmap()
+ */
+int VideoDevice::initMmap()
+{
+ /// @todo implement me
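+// BUFFERS is the number of memory-mapped capture buffers requested from the driver via VIDIOC_REQBUFS.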
+#define BUFFERS 2
+ if(isOpen())
+ {
+ kdDebug(14010) << k_funcinfo << full_filename << " Trying to MMAP" << endl;
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ struct v4l2_requestbuffers req;
+
+ CLEAR (req);
+
+ req.count = BUFFERS;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl (VIDIOC_REQBUFS, &req))
+ {
+ if (EINVAL == errno)
+ {
+ kdDebug(14010) << k_funcinfo << full_filename << " does not support memory mapping" << endl;
+ return EXIT_FAILURE;
+ }
+ else
+ {
+ return errnoReturn ("VIDIOC_REQBUFS");
+ }
+ }
+
+ if (req.count < BUFFERS)
+ {
+ kdDebug(14010) << k_funcinfo << "Insufficient buffer memory on " << full_filename << endl;
+ return EXIT_FAILURE;
+ }
+
+ m_rawbuffers.resize(req.count);
+
+ if (m_rawbuffers.size()==0)
+ {
+ kdDebug(14010) << k_funcinfo << "Out of memory" << endl;
+ return EXIT_FAILURE;
+ }
+
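+ 		// Query each driver-allocated buffer and map it into our address space; captured frames are then exchanged by queueing and dequeueing these buffers.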
+ for (m_streambuffers = 0; m_streambuffers < req.count; ++m_streambuffers)
+ {
+ struct v4l2_buffer v4l2buffer;
+
+ CLEAR (v4l2buffer);
+
+ v4l2buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ v4l2buffer.memory = V4L2_MEMORY_MMAP;
+ v4l2buffer.index = m_streambuffers;
+
+ if (-1 == xioctl (VIDIOC_QUERYBUF, &v4l2buffer))
+ return errnoReturn ("VIDIOC_QUERYBUF");
+
+ m_rawbuffers[m_streambuffers].length = v4l2buffer.length;
+ m_rawbuffers[m_streambuffers].start = (uchar *) mmap (NULL /* start anywhere */, v4l2buffer.length, PROT_READ | PROT_WRITE /* required */, MAP_SHARED /* recommended */, descriptor, v4l2buffer.m.offset);
+
+ if (MAP_FAILED == m_rawbuffers[m_streambuffers].start)
+ return errnoReturn ("mmap");
+ }
+#endif
+ m_currentbuffer.data.resize(m_rawbuffers[0].length); // Makes the imagesize.data buffer size equal to the rawbuffer size
+ kdDebug(14010) << k_funcinfo << full_filename << " m_currentbuffer.data.size(): " << m_currentbuffer.data.size() << endl;
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+
+/*!
+ \fn VideoDevice::initUserptr()
+ */
+int VideoDevice::initUserptr()
+{
+ /// @todo implement me
+ if(isOpen())
+ {
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ struct v4l2_requestbuffers req;
+
+ CLEAR (req);
+
+ req.count = 2;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl (VIDIOC_REQBUFS, &req))
+ {
+ if (EINVAL == errno)
+ {
+ 				kdDebug(14010) << k_funcinfo << full_filename << " does not support user pointer i/o" << endl;
+ return EXIT_FAILURE;
+ }
+ else
+ {
+ return errnoReturn ("VIDIOC_REQBUFS");
+ }
+ }
+
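+ 		// NOTE: four user-space buffers are allocated below even though only two were requested from the driver above.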
+ m_rawbuffers.resize(4);
+
+ if (m_rawbuffers.size()==0)
+ {
+ fprintf (stderr, "Out of memory\n");
+ return EXIT_FAILURE;
+ }
+
+ for (m_streambuffers = 0; m_streambuffers < 4; ++m_streambuffers)
+ {
+ m_rawbuffers[m_streambuffers].length = m_buffer_size;
+ m_rawbuffers[m_streambuffers].start = (uchar *) malloc (m_buffer_size);
+
+ if (!m_rawbuffers[m_streambuffers].start)
+ {
+ kdDebug(14010) << k_funcinfo << "Out of memory" << endl;
+ return EXIT_FAILURE;
+ }
+ }
+#endif
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+bool VideoDevice::canCapture()
+{
+ return m_videocapture;
+}
+
+bool VideoDevice::canChromakey()
+{
+ return m_videochromakey;
+}
+
+bool VideoDevice::canScale()
+{
+ return m_videoscale;
+}
+
+bool VideoDevice::canOverlay()
+{
+ return m_videooverlay;
+}
+
+bool VideoDevice::canRead()
+{
+ return m_videoread;
+}
+
+bool VideoDevice::canAsyncIO()
+{
+ return m_videoasyncio;
+}
+
+bool VideoDevice::canStream()
+{
+ return m_videostream;
+}
+
+
+
+}
+
+}
diff --git a/kopete/libkopete/avdevice/videodevice.h b/kopete/libkopete/avdevice/videodevice.h
new file mode 100644
index 00000000..982ab5f3
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevice.h
@@ -0,0 +1,333 @@
+/*
+    videodevice.h  -  Kopete Video Device Low-level Support
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#define ENABLE_AV
+
+#ifndef KOPETE_AVVIDEODEVICELISTITEM_H
+#define KOPETE_AVVIDEODEVICELISTITEM_H
+
+#if defined HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <sys/time.h>
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <signal.h>
+
+#if defined(__linux__) && defined(ENABLE_AV)
+
+#include <asm/types.h>
+#undef __STRICT_ANSI__
+#ifndef __u64 //required by videodev.h
+#define __u64 unsigned long long
+#endif // __u64
+
+#ifndef __s64 //required by videodev.h
+#define __s64 long long
+#endif // __s64
+
+
+#ifndef pgoff_t
+#define pgoff_t unsigned long
+#endif
+
+#include <linux/fs.h>
+#include <linux/kernel.h>
+#include <linux/videodev.h>
+#define VIDEO_MODE_PAL_Nc 3
+#define VIDEO_MODE_PAL_M 4
+#define VIDEO_MODE_PAL_N 5
+#define VIDEO_MODE_NTSC_JP 6
+#define __STRICT_ANSI__
+
+#endif // __linux__
+
+#include <qstring.h>
+#include <qfile.h>
+#include <qimage.h>
+#include <qvaluevector.h>
+#include <kcombobox.h>
+
+#include "videoinput.h"
+#include "videocontrol.h"
+
+namespace Kopete {
+
+namespace AV {
+
+/**
+@author Kopete Developers
+*/
+typedef enum
+{
+ VIDEODEV_DRIVER_NONE
+#if defined( __linux__) && defined(ENABLE_AV)
+ ,
+ VIDEODEV_DRIVER_V4L
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ ,
+ VIDEODEV_DRIVER_V4L2
+#endif
+#endif
+} videodev_driver;
+
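+// Pixel formats are defined as distinct bit flags so that several of them can be combined into a single capability mask.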
+typedef enum
+{
+// Packed RGB formats
+ PIXELFORMAT_NONE = 0,
+ PIXELFORMAT_GREY = (1 << 0),
+ PIXELFORMAT_RGB332 = (1 << 1),
+ PIXELFORMAT_RGB444 = (1 << 2),
+ PIXELFORMAT_RGB555 = (1 << 3),
+ PIXELFORMAT_RGB565 = (1 << 4),
+ PIXELFORMAT_RGB555X = (1 << 5),
+ PIXELFORMAT_RGB565X = (1 << 6),
+ PIXELFORMAT_BGR24 = (1 << 7),
+ PIXELFORMAT_RGB24 = (1 << 8),
+ PIXELFORMAT_BGR32 = (1 << 9),
+ PIXELFORMAT_RGB32 = (1 << 10),
+
+// Bayer RGB format
+ PIXELFORMAT_SBGGR8 = (1 << 11),
+
+// YUV formats
+ PIXELFORMAT_YUYV = (1 << 12),
+ PIXELFORMAT_UYVY = (1 << 13),
+ PIXELFORMAT_YUV420P = (1 << 14),
+ PIXELFORMAT_YUV422P = (1 << 15),
+
+// Compressed formats
+ PIXELFORMAT_JPEG = (1 << 16),
+ PIXELFORMAT_MPEG = (1 << 17),
+
+// Reserved formats
+ PIXELFORMAT_DV = (1 << 18),
+ PIXELFORMAT_ET61X251 = (1 << 19),
+ PIXELFORMAT_HI240 = (1 << 20),
+ PIXELFORMAT_HM12 = (1 << 21),
+ PIXELFORMAT_MJPEG = (1 << 22),
+ PIXELFORMAT_PWC1 = (1 << 23),
+ PIXELFORMAT_PWC2 = (1 << 24),
+ PIXELFORMAT_SN9C10X = (1 << 25),
+ PIXELFORMAT_WNVA = (1 << 26),
+ PIXELFORMAT_YYUV = (1 << 27)
+
+// PIXELFORMAT_ALL = 0x00003FFF
+} pixel_format;
+
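+// Signal standards follow the same bit-flag scheme; the composite values at the end mirror the V4L2_STD_* aggregates.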
+typedef enum
+{
+ STANDARD_NONE = 0,
+ STANDARD_PAL_B = (1 << 0),
+ STANDARD_PAL_B1 = (1 << 1),
+ STANDARD_PAL_G = (1 << 2),
+ STANDARD_PAL_H = (1 << 3),
+ STANDARD_PAL_I = (1 << 4),
+ STANDARD_PAL_D = (1 << 5),
+ STANDARD_PAL_D1 = (1 << 6),
+ STANDARD_PAL_K = (1 << 7),
+ STANDARD_PAL_M = (1 << 8),
+ STANDARD_PAL_N = (1 << 9),
+ STANDARD_PAL_Nc = (1 << 10),
+ STANDARD_PAL_60 = (1 << 11),
+// STANDARD_PAL_60 is a hybrid standard with 525 lines, 60 Hz refresh rate, and PAL color modulation with a 4.43 MHz color subcarrier. Some PAL video recorders can play back NTSC tapes in this mode for display on a 50/60 Hz agnostic PAL TV.
+ STANDARD_NTSC_M = (1 << 12),
+ STANDARD_NTSC_M_JP = (1 << 13),
+ STANDARD_NTSC_443 = (1 << 14),
+// STANDARD_NTSC_443 is a hybrid standard with 525 lines, 60 Hz refresh rate, and NTSC color modulation with a 4.43 MHz color subcarrier.
+ STANDARD_SECAM_B = (1 << 16),
+ STANDARD_SECAM_D = (1 << 17),
+ STANDARD_SECAM_G = (1 << 18),
+ STANDARD_SECAM_H = (1 << 19),
+ STANDARD_SECAM_K = (1 << 20),
+ STANDARD_SECAM_K1 = (1 << 21),
+ STANDARD_SECAM_L = (1 << 22),
+ STANDARD_SECAM_LC = (1 << 23),
+// ATSC/HDTV
+ STANDARD_ATSC_8_VSB = (1 << 24),
+ STANDARD_ATSC_16_VSB = (1 << 25),
+
+ STANDARD_PAL_BG = ( STANDARD_PAL_B | STANDARD_PAL_B1 | STANDARD_PAL_G ),
+ STANDARD_PAL_DK = ( STANDARD_PAL_D | STANDARD_PAL_D1 | STANDARD_PAL_K ),
+ STANDARD_PAL = ( STANDARD_PAL_BG | STANDARD_PAL_DK | STANDARD_PAL_H | STANDARD_PAL_I ),
+ STANDARD_NTSC = ( STANDARD_NTSC_M | STANDARD_NTSC_M_JP ),
+ STANDARD_SECAM_DK = ( STANDARD_SECAM_D | STANDARD_SECAM_K | STANDARD_SECAM_K1 ),
+ STANDARD_SECAM = ( STANDARD_SECAM_B | STANDARD_SECAM_G | STANDARD_SECAM_H | STANDARD_SECAM_DK | STANDARD_SECAM_L),
+ STANDARD_525_60 = ( STANDARD_PAL_M | STANDARD_PAL_60 | STANDARD_NTSC | STANDARD_NTSC_443),
+ STANDARD_625_50 = ( STANDARD_PAL | STANDARD_PAL_N | STANDARD_PAL_Nc | STANDARD_SECAM),
+ STANDARD_ALL = ( STANDARD_525_60 | STANDARD_625_50)
+} signal_standard;
+
+
+typedef enum
+{
+ IO_METHOD_NONE,
+ IO_METHOD_READ,
+ IO_METHOD_MMAP,
+ IO_METHOD_USERPTR
+} io_method;
+
+struct imagebuffer
+{
+ int height;
+ int width;
+ pixel_format pixelformat;
+ 	QValueVector <uchar> data; // Maybe this should be a rawbuffer instead? That could avoid a memory copy.
+};
+struct rawbuffer // raw buffer
+{
+ uchar * start;
+ size_t length;
+};
+
+
+class VideoDevice{
+public:
+ VideoDevice();
+ ~VideoDevice();
+ int setFileName(QString filename);
+ int open();
+ bool isOpen();
+ int checkDevice();
+ int showDeviceCapabilities();
+ int initDevice();
+ unsigned int inputs();
+ int width();
+ int minWidth();
+ int maxWidth();
+ int height();
+ int minHeight();
+ int maxHeight();
+ int setSize( int newwidth, int newheight);
+
+ pixel_format setPixelFormat(pixel_format newformat);
+ int pixelFormatCode(pixel_format pixelformat);
+ pixel_format pixelFormatForPalette( int palette );
+ int pixelFormatDepth(pixel_format pixelformat);
+ QString pixelFormatName(pixel_format pixelformat);
+ QString pixelFormatName(int pixelformat);
+ int detectPixelFormats();
+
+ __u64 signalStandardCode(signal_standard standard);
+ QString signalStandardName(signal_standard standard);
+ QString signalStandardName(int standard);
+ int detectSignalStandards();
+
+ int currentInput();
+ int selectInput(int input);
+ int setInputParameters();
+ int startCapturing();
+ int getFrame();
+ int getFrame(imagebuffer *imgbuffer);
+ int getImage(QImage *qimage);
+ int stopCapturing();
+ int close();
+
+ float getBrightness();
+ float setBrightness(float brightness);
+ float getContrast();
+ float setContrast(float contrast);
+ float getSaturation();
+ float setSaturation(float saturation);
+ float getWhiteness();
+ float setWhiteness(float whiteness);
+ float getHue();
+ float setHue(float Hue);
+
+ bool getAutoBrightnessContrast();
+ bool setAutoBrightnessContrast(bool brightnesscontrast);
+ bool getAutoColorCorrection();
+ bool setAutoColorCorrection(bool colorcorrection);
+ bool getImageAsMirror();
+ bool setImageAsMirror(bool imageasmirror);
+
+ bool canCapture();
+ bool canChromakey();
+ bool canScale();
+ bool canOverlay();
+ bool canRead();
+ bool canAsyncIO();
+ bool canStream();
+
+ QString m_model;
+ QString m_name;
+ 	size_t m_modelindex; // Index of this device among devices of the same model, used when more than one device of a given model is present
+ QString full_filename;
+ videodev_driver m_driver;
+ int descriptor;
+
+//protected:
+#if defined(__linux__) && defined(ENABLE_AV)
+#ifdef V4L2_CAP_VIDEO_CAPTURE
+ struct v4l2_capability V4L2_capabilities;
+ struct v4l2_cropcap cropcap;
+ struct v4l2_crop crop;
+ struct v4l2_format fmt;
+ 	struct v4l2_fmtdesc fmtdesc; // Not sure if it must be here or inside detectPixelFormats(). Should investigate.
+// struct v4l2_input m_input;
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_querymenu querymenu;
+ 	void enumerateMenu (void);
+
+#endif
+ struct video_capability V4L_capabilities;
+ struct video_buffer V4L_videobuffer;
+#endif
+ QValueVector<Kopete::AV::VideoInput> m_input;
+ QValueVector<Kopete::AV::VideoControl> m_control;
+// QFile file;
+protected:
+ int currentwidth, minwidth, maxwidth, currentheight, minheight, maxheight;
+
+ bool m_disablemmap;
+ bool m_workaroundbrokendriver;
+
+ QValueVector<rawbuffer> m_rawbuffers;
+ unsigned int m_streambuffers;
+ imagebuffer m_currentbuffer;
+ int m_buffer_size;
+
+ int m_current_input;
+ pixel_format m_pixelformat;
+
+ io_method m_io_method;
+ bool m_videocapture;
+ bool m_videochromakey;
+ bool m_videoscale;
+ bool m_videooverlay;
+ bool m_videoread;
+ bool m_videoasyncio;
+ bool m_videostream;
+
+ int xioctl(int request, void *arg);
+ int errnoReturn(const char* s);
+ int initRead();
+ int initMmap();
+ int initUserptr();
+
+};
+
+}
+
+}
+
+#endif
diff --git a/kopete/libkopete/avdevice/videodevicemodelpool.cpp b/kopete/libkopete/avdevice/videodevicemodelpool.cpp
new file mode 100644
index 00000000..c6fc533e
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevicemodelpool.cpp
@@ -0,0 +1,68 @@
+/*
+    videodevicemodelpool.cpp  -  Kopete Multiple Video Device handler Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#include "videodevicemodelpool.h"
+
+namespace Kopete {
+
+namespace AV {
+
+VideoDeviceModelPool::VideoDeviceModelPool()
+{
+}
+
+
+VideoDeviceModelPool::~VideoDeviceModelPool()
+{
+}
+
+void VideoDeviceModelPool::clear()
+{
+ m_devicemodel.clear();
+}
+
+size_t VideoDeviceModelPool::size()
+{
+ return m_devicemodel.size();
+}
+
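+// Returns the zero-based index of this device among already-registered devices of the same model (0 for the first device of a given model).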
+size_t VideoDeviceModelPool::addModel( QString newmodel )
+{
+ VideoDeviceModel newdevicemodel;
+ newdevicemodel.model=newmodel;
+ newdevicemodel.count=0;
+
+ if(m_devicemodel.size())
+ {
+ for ( size_t loop = 0 ; loop < m_devicemodel.size(); loop++)
+ if (newmodel == m_devicemodel[loop].model)
+ {
+ kdDebug() << k_funcinfo << "Model " << newmodel << " already exists." << endl;
+ m_devicemodel[loop].count++;
+ return m_devicemodel[loop].count;
+ }
+ }
+ m_devicemodel.push_back(newdevicemodel);
+ m_devicemodel[m_devicemodel.size()-1].model = newmodel;
+ m_devicemodel[m_devicemodel.size()-1].count = 0;
+ return 0;
+}
+
+
+}
+
+}
diff --git a/kopete/libkopete/avdevice/videodevicemodelpool.h b/kopete/libkopete/avdevice/videodevicemodelpool.h
new file mode 100644
index 00000000..54d801c4
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevicemodelpool.h
@@ -0,0 +1,53 @@
+/*
+    videodevicemodelpool.h  -  Kopete Multiple Video Device handler Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#ifndef KOPETE_AVVIDEODEVICEMODELPOOL_H
+#define KOPETE_AVVIDEODEVICEMODELPOOL_H
+
+#include <qstring.h>
+#include <qvaluevector.h>
+#include <kdebug.h>
+#include "kopete_export.h"
+
+namespace Kopete {
+
+namespace AV {
+
+/**
+ @author Kopete Developers <[email protected]>
+*/
+class VideoDeviceModelPool{
+
+ struct VideoDeviceModel
+ {
+ QString model;
+ size_t count;
+ };
+ QValueVector<VideoDeviceModel> m_devicemodel;
+public:
+ VideoDeviceModelPool();
+ ~VideoDeviceModelPool();
+ void clear();
+ size_t size();
+ size_t addModel(QString newmodel);
+};
+
+}
+
+}
+
+#endif
diff --git a/kopete/libkopete/avdevice/videodevicepool.cpp b/kopete/libkopete/avdevice/videodevicepool.cpp
new file mode 100644
index 00000000..2651addb
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevicepool.cpp
@@ -0,0 +1,889 @@
+/*
+    videodevicepool.cpp  -  Kopete Multiple Video Device handler Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#define ENABLE_AV
+
+#include <assert.h>
+#include <cstdlib>
+#include <cerrno>
+#include <cstring>
+
+#include <kdebug.h>
+#include <klocale.h>
+#include <qdir.h>
+
+#include "videodevice.h"
+#include "videodevicepool.h"
+
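+// Zero-initialise a structure before handing it to an ioctl.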
+#define CLEAR(x) memset (&(x), 0, sizeof (x))
+
+namespace Kopete {
+
+namespace AV {
+
+VideoDevicePool *VideoDevicePool::s_self = NULL;
+__u64 VideoDevicePool::m_clients = 0;
+
+VideoDevicePool* VideoDevicePool::self()
+{
+ kdDebug(14010) << "libkopete (avdevice): self() called" << endl;
+ if (s_self == NULL)
+ {
+ s_self = new VideoDevicePool;
+ if (s_self)
+ m_clients = 0;
+ }
+ kdDebug(14010) << "libkopete (avdevice): self() exited successfuly. m_clients = " << m_clients << endl;
+ return s_self;
+}
+
+VideoDevicePool::VideoDevicePool()
+{
+}
+
+
+VideoDevicePool::~VideoDevicePool()
+{
+}
+
+
+
+
+/*!
+ \fn VideoDevicePool::open()
+ */
+int VideoDevicePool::open()
+{
+ /// @todo implement me
+
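+ 	// m_ready (presumably a mutex) serialises device scanning and opening across clients.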
+ m_ready.lock();
+ if(!m_videodevice.size())
+ {
+ kdDebug(14010) << k_funcinfo << "open(): No devices found. Must scan for available devices." << m_current_device << endl;
+ scanDevices();
+ }
+ if(!m_videodevice.size())
+ {
+ kdDebug(14010) << k_funcinfo << "open(): No devices found. bailing out." << m_current_device << endl;
+ m_ready.unlock();
+ return EXIT_FAILURE;
+ }
+ if(m_current_device >= m_videodevice.size())
+ {
+ kdDebug(14010) << k_funcinfo << "open(): Device out of scope (" << m_current_device << "). Defaulting to the first one." << endl;
+ m_current_device = 0;
+ }
+ int isopen = m_videodevice[currentDevice()].open();
+ if ( isopen == EXIT_SUCCESS)
+ {
+ 		loadConfig(); // Temporary hack: open() seems to reset the input parameters; a proper fix is needed.
+
+ }
+ m_clients++;
+ kdDebug(14010) << k_funcinfo << "Number of clients: " << m_clients << endl;
+ m_ready.unlock();
+ return isopen;
+}
+
+/*!
+ \fn VideoDevicePool::open(int device)
+ */
+int VideoDevicePool::open(unsigned int device)
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "open(" << device << ") called." << endl;
+ if(device >= m_videodevice.size())
+ {
+ kdDebug(14010) << k_funcinfo << "open(" << device <<"): Device does not exist." << endl;
+ return EXIT_FAILURE;
+ }
+ close();
+ kdDebug(14010) << k_funcinfo << "open(" << device << ") Setting m_current_Device to " << device << endl;
+ m_current_device = device;
+ saveConfig();
+ kdDebug(14010) << k_funcinfo << "open(" << device << ") Calling open()." << endl;
+ return open();
+}
+
+bool VideoDevicePool::isOpen()
+{
+ return m_videodevice[currentDevice()].isOpen();
+}
+
+/*!
+ \fn VideoDevicePool::showDeviceCapabilities(int device)
+ */
+int VideoDevicePool::showDeviceCapabilities(unsigned int device)
+{
+ return m_videodevice[device].showDeviceCapabilities();
+}
+
+int VideoDevicePool::width()
+{
+ return m_videodevice[currentDevice()].width();
+}
+
+int VideoDevicePool::minWidth()
+{
+ return m_videodevice[currentDevice()].minWidth();
+}
+
+int VideoDevicePool::maxWidth()
+{
+ return m_videodevice[currentDevice()].maxWidth();
+}
+
+int VideoDevicePool::height()
+{
+ return m_videodevice[currentDevice()].height();
+}
+
+int VideoDevicePool::minHeight()
+{
+ return m_videodevice[currentDevice()].minHeight();
+}
+
+int VideoDevicePool::maxHeight()
+{
+ return m_videodevice[currentDevice()].maxHeight();
+}
+
+int VideoDevicePool::setSize( int newwidth, int newheight)
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].setSize(newwidth, newheight);
+ else
+ {
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::setSize() fallback for no device." << endl;
+ m_buffer.width=newwidth;
+ m_buffer.height=newheight;
+ m_buffer.pixelformat= PIXELFORMAT_RGB24;
+ m_buffer.data.resize(m_buffer.width*m_buffer.height*3);
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::setSize() buffer size: "<< m_buffer.data.size() << endl;
+ }
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn VideoDevicePool::close()
+ */
+int VideoDevicePool::close()
+{
+ /// @todo implement me
+ if(m_clients)
+ m_clients--;
+ if((currentDevice() < m_videodevice.size())&&(!m_clients))
+ return m_videodevice[currentDevice()].close();
+ if(m_clients)
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::close() The video device is still in use." << endl;
+ if(currentDevice() >= m_videodevice.size())
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::close() Current device out of range." << endl;
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn VideoDevicePool::startCapturing()
+ */
+int VideoDevicePool::startCapturing()
+{
+ kdDebug(14010) << k_funcinfo << "startCapturing() called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].startCapturing();
+ return EXIT_FAILURE;
+}
+
+
+/*!
+ \fn VideoDevicePool::stopCapturing()
+ */
+int VideoDevicePool::stopCapturing()
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].stopCapturing();
+ return EXIT_FAILURE;
+}
+
+// Implementation of the methods that get / set input's adjustment parameters
+/*!
+ \fn VideoDevicePool::getBrightness()
+ */
+float VideoDevicePool::getBrightness()
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].getBrightness();
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::setBrightness(float brightness)
+ */
+float VideoDevicePool::setBrightness(float brightness)
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].setBrightness(brightness);
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::getContrast()
+ */
+float VideoDevicePool::getContrast()
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].getContrast();
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::setContrast(float contrast)
+ */
+float VideoDevicePool::setContrast(float contrast)
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].setContrast(contrast);
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::getSaturation()
+ */
+float VideoDevicePool::getSaturation()
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].getSaturation();
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::setSaturation(float saturation)
+ */
+float VideoDevicePool::setSaturation(float saturation)
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].setSaturation(saturation);
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::getWhiteness()
+ */
+float VideoDevicePool::getWhiteness()
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].getWhiteness();
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::setWhiteness(float whiteness)
+ */
+float VideoDevicePool::setWhiteness(float whiteness)
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].setWhiteness(whiteness);
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::getHue()
+ */
+float VideoDevicePool::getHue()
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].getHue();
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::setHue(float hue)
+ */
+float VideoDevicePool::setHue(float hue)
+{
+ if (currentDevice() < m_videodevice.size() )
+ return m_videodevice[currentDevice()].setHue(hue);
+ else
+ return 0;
+}
+
+/*!
+ \fn VideoDevicePool::getAutoBrightnessContrast()
+ */
+bool VideoDevicePool::getAutoBrightnessContrast()
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].getAutoBrightnessContrast();
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::setAutoBrightnessContrast(bool brightnesscontrast)
+ */
+bool VideoDevicePool::setAutoBrightnessContrast(bool brightnesscontrast)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::setAutoBrightnessContrast(" << brightnesscontrast << ") called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].setAutoBrightnessContrast(brightnesscontrast);
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::getAutoColorCorrection()
+ */
+bool VideoDevicePool::getAutoColorCorrection()
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].getAutoColorCorrection();
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::setAutoColorCorrection(bool colorcorrection)
+ */
+bool VideoDevicePool::setAutoColorCorrection(bool colorcorrection)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::setAutoColorCorrection(" << colorcorrection << ") called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].setAutoColorCorrection(colorcorrection);
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::getImageAsMirror()
+ */
+bool VideoDevicePool::getImageAsMirror()
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].getImageAsMirror();
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::setImageAsMirror(bool imageasmirror)
+ */
+bool VideoDevicePool::setImageAsMirror(bool imageasmirror)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::setImageAsMirror(" << imageasmirror << ") called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].setImageAsMirror(imageasmirror);
+ return false;
+}
+
+/*!
+ \fn VideoDevicePool::getFrame()
+ */
+int VideoDevicePool::getFrame()
+{
+// kdDebug(14010) << k_funcinfo << "VideoDevicePool::getFrame() called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].getFrame();
+ else
+ {
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::getFrame() fallback for no device." << endl;
+ for(unsigned int loop=0; loop < m_buffer.data.size(); loop+=3)
+ {
+ m_buffer.data[loop] = 255;
+ m_buffer.data[loop+1] = 0;
+ m_buffer.data[loop+2] = 0;
+ }
+ }
+// kdDebug(14010) << k_funcinfo << "VideoDevicePool::getFrame() exited successfuly." << endl;
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn VideoDevicePool::getQImage(QImage *qimage)
+ */
+int VideoDevicePool::getImage(QImage *qimage)
+{
+// kdDebug(14010) << k_funcinfo << "VideoDevicePool::getImage() called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].getImage(qimage);
+ else
+ {
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::getImage() fallback for no device." << endl;
+ qimage->create(m_buffer.width, m_buffer.height,32, QImage::IgnoreEndian);
+ uchar *bits=qimage->bits();
+ switch(m_buffer.pixelformat)
+ {
+ case PIXELFORMAT_NONE : break;
+ case PIXELFORMAT_GREY : break;
+ case PIXELFORMAT_RGB332 : break;
+ case PIXELFORMAT_RGB555 : break;
+ case PIXELFORMAT_RGB555X: break;
+ case PIXELFORMAT_RGB565 : break;
+ case PIXELFORMAT_RGB565X: break;
+ case PIXELFORMAT_RGB24 :
+ {
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::getImage() fallback for no device - RGB24." << endl;
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = m_buffer.data[step];
+ bits[loop+1] = m_buffer.data[step+1];
+ bits[loop+2] = m_buffer.data[step+2];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ }
+ break;
+ case PIXELFORMAT_BGR24 :
+ {
+ int step=0;
+ for(int loop=0;loop < qimage->numBytes();loop+=4)
+ {
+ bits[loop] = m_buffer.data[step+2];
+ bits[loop+1] = m_buffer.data[step+1];
+ bits[loop+2] = m_buffer.data[step];
+ bits[loop+3] = 255;
+ step+=3;
+ }
+ }
+ break;
+ case PIXELFORMAT_RGB32 : memcpy(bits,&m_buffer.data[0], m_buffer.data.size());
+ break;
+ case PIXELFORMAT_BGR32 : break;
+ }
+ }
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::getImage() exited successfuly." << endl;
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::selectInput(int input)
+ */
+int VideoDevicePool::selectInput(int newinput)
+{
+ kdDebug(14010) << k_funcinfo << "VideoDevicePool::selectInput(" << newinput << ") called." << endl;
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].selectInput(newinput);
+ else
+ return 0;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::setInputParameters()
+ */
+int VideoDevicePool::setInputParameters()
+{
+ if(m_videodevice.size())
+ return m_videodevice[currentDevice()].setInputParameters();
+ else
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::fillDeviceKComboBox(KComboBox *combobox)
+ */
+int VideoDevicePool::fillDeviceKComboBox(KComboBox *combobox)
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "fillInputKComboBox: Called." << endl;
+ combobox->clear();
+ if(m_videodevice.size())
+ {
+ for (unsigned int loop=0; loop < m_videodevice.size(); loop++)
+ {
+ combobox->insertItem(m_videodevice[loop].m_name);
+ kdDebug(14010) << k_funcinfo << "DeviceKCombobox: Added device " << loop << ": " << m_videodevice[loop].m_name << endl;
+ }
+ combobox->setCurrentItem(currentDevice());
+ return EXIT_SUCCESS;
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::fillInputKComboBox(KComboBox *combobox)
+ */
+int VideoDevicePool::fillInputKComboBox(KComboBox *combobox)
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "fillInputKComboBox: Called." << endl;
+ combobox->clear();
+ if(m_videodevice.size())
+ {
+ if(m_videodevice[currentDevice()].inputs()>0)
+ {
+ for (unsigned int loop=0; loop < m_videodevice[currentDevice()].inputs(); loop++)
+ {
+ combobox->insertItem(m_videodevice[currentDevice()].m_input[loop].name);
+ kdDebug(14010) << k_funcinfo << "InputKCombobox: Added input " << loop << ": " << m_videodevice[currentDevice()].m_input[loop].name << " (tuner: " << m_videodevice[currentDevice()].m_input[loop].hastuner << ")" << endl;
+ }
+ combobox->setCurrentItem(currentInput());
+ return EXIT_SUCCESS;
+ }
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::fillStandardKComboBox(KComboBox *combobox)
+ */
+int VideoDevicePool::fillStandardKComboBox(KComboBox *combobox)
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "fillInputKComboBox: Called." << endl;
+ combobox->clear();
+ if(m_videodevice.size())
+ {
+ if(m_videodevice[currentDevice()].inputs()>0)
+ {
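+ // m_standards is a bitmask of supported signal standards; probe its low 25 bits and
+ // list a human-readable name for every standard the current input reports.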
+ for (unsigned int loop=0; loop < 25; loop++)
+ {
+ if ( (m_videodevice[currentDevice()].m_input[currentInput()].m_standards) & (1 << loop) )
+ combobox->insertItem(m_videodevice[currentDevice()].signalStandardName( 1 << loop));
+/*
+ case STANDARD_PAL_B1 : return V4L2_STD_PAL_B1; break;
+ case STANDARD_PAL_G : return V4L2_STD_PAL_G; break;
+ case STANDARD_PAL_H : return V4L2_STD_PAL_H; break;
+ case STANDARD_PAL_I : return V4L2_STD_PAL_I; break;
+ case STANDARD_PAL_D : return V4L2_STD_PAL_D; break;
+ case STANDARD_PAL_D1 : return V4L2_STD_PAL_D1; break;
+ case STANDARD_PAL_K : return V4L2_STD_PAL_K; break;
+ case STANDARD_PAL_M : return V4L2_STD_PAL_M; break;
+ case STANDARD_PAL_N : return V4L2_STD_PAL_N; break;
+ case STANDARD_PAL_Nc : return V4L2_STD_PAL_Nc; break;
+ case STANDARD_PAL_60 : return V4L2_STD_PAL_60; break;
+ case STANDARD_NTSC_M : return V4L2_STD_NTSC_M; break;
+ case STANDARD_NTSC_M_JP : return V4L2_STD_NTSC_M_JP; break;
+ case STANDARD_NTSC_443 : return V4L2_STD_NTSC; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_SECAM_B : return V4L2_STD_SECAM_B; break;
+ case STANDARD_SECAM_D : return V4L2_STD_SECAM_D; break;
+ case STANDARD_SECAM_G : return V4L2_STD_SECAM_G; break;
+ case STANDARD_SECAM_H : return V4L2_STD_SECAM_H; break;
+ case STANDARD_SECAM_K : return V4L2_STD_SECAM_K; break;
+ case STANDARD_SECAM_K1 : return V4L2_STD_SECAM_K1; break;
+ case STANDARD_SECAM_L : return V4L2_STD_SECAM_L; break;
+ case STANDARD_SECAM_LC : return V4L2_STD_SECAM; break; // Using workaround value because my videodev2.h header seems to not include this standard in struct __u64 v4l2_std_id
+ case STANDARD_ATSC_8_VSB : return V4L2_STD_ATSC_8_VSB; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_ATSC_16_VSB : return V4L2_STD_ATSC_16_VSB; break; // ATSC/HDTV Standard officially not supported by V4L2 but exists in videodev2.h
+ case STANDARD_PAL_BG : return V4L2_STD_PAL_BG; break;
+ case STANDARD_PAL_DK : return V4L2_STD_PAL_DK; break;
+ case STANDARD_PAL : return V4L2_STD_PAL; break;
+ case STANDARD_NTSC : return V4L2_STD_NTSC; break;
+ case STANDARD_SECAM_DK : return V4L2_STD_SECAM_DK; break;
+ case STANDARD_SECAM : return V4L2_STD_SECAM; break;
+ case STANDARD_525_60 : return V4L2_STD_525_60; break;
+ case STANDARD_625_50 : return V4L2_STD_625_50; break;
+ case STANDARD_ALL : return V4L2_STD_ALL; break;
+
+ combobox->insertItem(m_videodevice[currentDevice()].m_input[loop].name);
+ kdDebug(14010) << k_funcinfo << "StandardKCombobox: Added input " << loop << ": " << m_videodevice[currentDevice()].m_input[loop].name << " (tuner: " << m_videodevice[currentDevice()].m_input[loop].hastuner << ")" << endl;*/
+ }
+ combobox->setCurrentItem(currentInput());
+ return EXIT_SUCCESS;
+ }
+ }
+ return EXIT_FAILURE;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::scanDevices()
+ */
+int VideoDevicePool::scanDevices()
+{
+ /// @todo implement me
+
+ kdDebug(14010) << k_funcinfo << "called" << endl;
+#if defined(__linux__) && defined(ENABLE_AV)
+ QDir videodevice_dir;
+ const QString videodevice_dir_path=QString::fromLocal8Bit("/dev/v4l/");
+ const QString videodevice_dir_filter=QString::fromLocal8Bit("video*");
+ VideoDevice videodevice;
+
+ m_videodevice.clear();
+ m_modelvector.clear();
+
+ videodevice_dir.setPath(videodevice_dir_path);
+ videodevice_dir.setNameFilter(videodevice_dir_filter);
+ videodevice_dir.setFilter( QDir::System | QDir::NoSymLinks | QDir::Readable | QDir::Writable );
+ videodevice_dir.setSorting( QDir::Name );
+
+ kdDebug(14010) << k_funcinfo << "Looking for devices in " << videodevice_dir_path << endl;
+ const QFileInfoList *list = videodevice_dir.entryInfoList();
+
+ if (!list)
+ {
+ kdDebug(14010) << k_funcinfo << "Found no suitable devices in " << videodevice_dir_path << endl;
+ QDir videodevice_dir;
+ const QString videodevice_dir_path=QString::fromLocal8Bit("/dev/");
+ const QString videodevice_dir_filter=QString::fromLocal8Bit("video*");
+ VideoDevice videodevice;
+
+ videodevice_dir.setPath(videodevice_dir_path);
+ videodevice_dir.setNameFilter(videodevice_dir_filter);
+ videodevice_dir.setFilter( QDir::System | QDir::NoSymLinks | QDir::Readable | QDir::Writable );
+ videodevice_dir.setSorting( QDir::Name );
+
+ kdDebug(14010) << k_funcinfo << "Looking for devices in " << videodevice_dir_path << endl;
+ const QFileInfoList *list = videodevice_dir.entryInfoList();
+
+ if (!list)
+ {
+ kdDebug(14010) << k_funcinfo << "Found no suitable devices in " << videodevice_dir_path << endl;
+ return EXIT_FAILURE;
+ }
+
+ QFileInfoListIterator fileiterator ( *list );
+ QFileInfo *fileinfo;
+
+ kdDebug(14010) << k_funcinfo << "scanning devices in " << videodevice_dir_path << "..." << endl;
+ while ( (fileinfo = fileiterator.current()) != 0 )
+ {
+ videodevice.setFileName(fileinfo->absFilePath());
+ kdDebug(14010) << k_funcinfo << "Found device " << videodevice.full_filename << endl;
+ videodevice.open(); // Should be opened with O_NONBLOCK (it is a FIFO), but it is not clear how to do that through QFile.
+ if(videodevice.isOpen())
+ {
+ kdDebug(14010) << k_funcinfo << "File " << videodevice.full_filename << " was opened successfuly" << endl;
+
+// This must be changed to proper code to handle multiple devices of the same model. It currently simply adds models without proper checking.
+ videodevice.close();
+ videodevice.m_modelindex=m_modelvector.addModel (videodevice.m_model); // Adds device to the device list and sets model number
+ m_videodevice.push_back(videodevice);
+ }
+ ++fileiterator;
+ }
+
+
+ m_current_device = 0;
+ loadConfig();
+ kdDebug(14010) << k_funcinfo << "exited successfuly" << endl;
+ return EXIT_SUCCESS;
+
+ }
+ QFileInfoListIterator fileiterator ( *list );
+ QFileInfo *fileinfo;
+
+ kdDebug(14010) << k_funcinfo << "scanning devices in " << videodevice_dir_path << "..." << endl;
+ while ( (fileinfo = fileiterator.current()) != 0 )
+ {
+ videodevice.setFileName(fileinfo->absFilePath());
+ kdDebug(14010) << k_funcinfo << "Found device " << videodevice.full_filename << endl;
+ videodevice.open(); // Should be opened with O_NONBLOCK (it is a FIFO), but it is not clear how to do that through QFile.
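+// Possible alternative (untested sketch): bypass QFile and request a non-blocking descriptor with the
+// plain POSIX call, e.g.   int fd = ::open(fileinfo->absFilePath().local8Bit(), O_RDWR | O_NONBLOCK);
+// (needs <fcntl.h>), assuming VideoDevice could be made to work with a raw file descriptor.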
+ if(videodevice.isOpen())
+ {
+ kdDebug(14010) << k_funcinfo << "File " << videodevice.full_filename << " was opened successfuly" << endl;
+
+// This must be changed to proper code to handle multiple devices of the same model. It currently simply adds models without proper checking.
+ videodevice.close();
+ videodevice.m_modelindex=m_modelvector.addModel (videodevice.m_model); // Adds device to the device list and sets model number
+ m_videodevice.push_back(videodevice);
+ }
+ ++fileiterator;
+ }
+ m_current_device = 0;
+ loadConfig();
+#endif
+ kdDebug(14010) << k_funcinfo << "exited successfuly" << endl;
+ return EXIT_SUCCESS;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::hasDevices()
+ */
+bool VideoDevicePool::hasDevices()
+{
+ /// @todo implement me
+ if(m_videodevice.size())
+ return true;
+ return false;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::size()
+ */
+size_t VideoDevicePool::size()
+{
+ /// @todo implement me
+ return m_videodevice.size();
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::currentDevice()
+ */
+unsigned int VideoDevicePool::currentDevice()
+{
+ /// @todo implement me
+ return m_current_device;
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::currentInput()
+ */
+int VideoDevicePool::currentInput()
+{
+ /// @todo implement me
+ return m_videodevice[currentDevice()].currentInput();
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::currentInput()
+ */
+unsigned int VideoDevicePool::inputs()
+{
+ /// @todo implement me
+ return m_videodevice[currentDevice()].inputs();
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::loadConfig()
+ */
+void VideoDevicePool::loadConfig()
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "called" << endl;
+ if((hasDevices())&&(m_clients==0))
+ {
+ KConfig *config = KGlobal::config();
+ config->setGroup("Video Device Settings");
+ const QString currentdevice = config->readEntry("Current Device", QString::null);
+ kdDebug(14010) << k_funcinfo << "Current device: " << currentdevice << endl;
+
+// m_current_device = 0; // Must check this, because multiple loadConfig() calls from other methods can do bad things. Watch out!
+
+ VideoDeviceVector::iterator vditerator;
+ for( vditerator = m_videodevice.begin(); vditerator != m_videodevice.end(); ++vditerator )
+ {
+ const QString modelindex = QString::fromLocal8Bit ( "Model %1 Device %2") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex); // must match the key written by saveConfig(), which uses m_model
+ if(modelindex == currentdevice)
+ {
+ m_current_device = vditerator - m_videodevice.begin();
+// kdDebug(14010) << k_funcinfo << "This place will be used to set " << modelindex << " as the current device ( " << (vditerator - m_videodevice.begin()) << " )." << endl;
+ }
+ const QString name = config->readEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Name") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex)), (*vditerator).m_model);
+ const int currentinput = config->readNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Current input") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex)), 0);
+ kdDebug(14010) << k_funcinfo << "Device name: " << name << endl;
+ kdDebug(14010) << k_funcinfo << "Device current input: " << currentinput << endl;
+ (*vditerator).selectInput(currentinput);
+
+ for (size_t input = 0 ; input < (*vditerator).m_input.size(); input++)
+ {
+ const float brightness = config->readDoubleNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Brightness").arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , 0.5 );
+ const float contrast = config->readDoubleNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Contrast") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , 0.5 );
+ const float saturation = config->readDoubleNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Saturation").arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , 0.5 );
+ const float whiteness = config->readDoubleNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Whiteness") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , 0.5 );
+ const float hue = config->readDoubleNumEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Hue") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , 0.5 );
+ const bool autobrightnesscontrast = config->readBoolEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 AutoBrightnessContrast") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , false );
+ const bool autocolorcorrection = config->readBoolEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 AutoColorCorrection") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , false );
+ const bool imageasmirror = config->readBoolEntry((QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 ImageAsMirror") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input)) , false );
+ (*vditerator).setBrightness(brightness);
+ (*vditerator).setContrast(contrast);
+ (*vditerator).setSaturation(saturation);
+ (*vditerator).setWhiteness(whiteness);
+ (*vditerator).setHue(hue);
+ (*vditerator).setAutoBrightnessContrast(autobrightnesscontrast);
+ (*vditerator).setAutoColorCorrection(autocolorcorrection);
+ (*vditerator).setImageAsMirror(imageasmirror);
+ kdDebug(14010) << k_funcinfo << "Brightness:" << brightness << endl;
+ kdDebug(14010) << k_funcinfo << "Contrast :" << contrast << endl;
+ kdDebug(14010) << k_funcinfo << "Saturation:" << saturation << endl;
+ kdDebug(14010) << k_funcinfo << "Whiteness :" << whiteness << endl;
+ kdDebug(14010) << k_funcinfo << "Hue :" << hue << endl;
+ kdDebug(14010) << k_funcinfo << "AutoBrightnessContrast:" << autobrightnesscontrast << endl;
+ kdDebug(14010) << k_funcinfo << "AutoColorCorrection :" << autocolorcorrection << endl;
+ kdDebug(14010) << k_funcinfo << "ImageAsMirror :" << imageasmirror << endl;
+ }
+ }
+ }
+}
+
+/*!
+ \fn Kopete::AV::VideoDevicePool::saveConfig()
+ */
+void VideoDevicePool::saveConfig()
+{
+ /// @todo implement me
+ kdDebug(14010) << k_funcinfo << "called" << endl;
+ if(hasDevices())
+ {
+ KConfig *config = KGlobal::config();
+ config->setGroup("Video Device Settings");
+
+/* if(m_modelvector.size())
+ {
+ VideoDeviceModelPool::m_devicemodel::iterator vmiterator;
+ for( vmiterator = m_modelvector.begin(); vmiterator != m_modelvector.end(); ++vmiterator )
+ {
+ kdDebug(14010) << "Device Model: " << (*vmiterator).model << endl;
+ kdDebug(14010) << "Device Count: " << (*vmiterator).count << endl;
+ }
+ }
+*/
+// Stores what is the current video device in use
+ const QString currentdevice = QString::fromLocal8Bit ( "Model %1 Device %2" ) .arg(m_videodevice[m_current_device].m_model) .arg(m_videodevice[m_current_device].m_modelindex);
+ config->writeEntry( "Current Device", currentdevice);
+
+ VideoDeviceVector::iterator vditerator;
+ for( vditerator = m_videodevice.begin(); vditerator != m_videodevice.end(); ++vditerator )
+ {
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Name:" << (*vditerator).m_name << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Current input:" << (*vditerator).currentInput() << endl;
+
+// Stores current input for the given video device
+ const QString name = QString::fromLocal8Bit ( "Model %1 Device %2 Name") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex);
+ const QString currentinput = QString::fromLocal8Bit ( "Model %1 Device %2 Current input") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex);
+ config->writeEntry( name, (*vditerator).m_name);
+ config->writeEntry( currentinput, (*vditerator).currentInput());
+
+ for (size_t input = 0 ; input < (*vditerator).m_input.size(); input++)
+ {
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Brightness: " << (*vditerator).m_input[input].getBrightness() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Contrast : " << (*vditerator).m_input[input].getContrast() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Saturation: " << (*vditerator).m_input[input].getSaturation() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Whiteness : " << (*vditerator).m_input[input].getWhiteness() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Hue : " << (*vditerator).m_input[input].getHue() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Automatic brightness / contrast: " << (*vditerator).m_input[input].getAutoBrightnessContrast() << endl;
+ kdDebug(14010) << "Model:" << (*vditerator).m_model << ":Index:" << (*vditerator).m_modelindex << ":Input:" << input << ":Automatic color correction : " << (*vditerator).m_input[input].getAutoColorCorrection() << endl;
+
+// Stores configuration about each channel
+ const QString brightness = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Brightness") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString contrast = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Contrast") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString saturation = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Saturation") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString whiteness = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Whiteness") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString hue = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 Hue") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString autobrightnesscontrast = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 AutoBrightnessContrast") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString autocolorcorrection = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 AutoColorCorrection") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ const QString imageasmirror = QString::fromLocal8Bit ( "Model %1 Device %2 Input %3 ImageAsMirror") .arg ((*vditerator).m_model ) .arg ((*vditerator).m_modelindex) .arg (input);
+ config->writeEntry( brightness, (*vditerator).m_input[input].getBrightness());
+ config->writeEntry( contrast, (*vditerator).m_input[input].getContrast());
+ config->writeEntry( saturation, (*vditerator).m_input[input].getSaturation());
+ config->writeEntry( whiteness, (*vditerator).m_input[input].getWhiteness());
+ config->writeEntry( hue, (*vditerator).m_input[input].getHue());
+ config->writeEntry( autobrightnesscontrast, (*vditerator).m_input[input].getAutoBrightnessContrast());
+ config->writeEntry( autocolorcorrection, (*vditerator).m_input[input].getAutoColorCorrection());
+ config->writeEntry( imageasmirror, (*vditerator).m_input[input].getImageAsMirror());
+ }
+ }
+ config->sync();
+ kdDebug(14010) << endl;
+ }
+}
+
+
+
+}
+
+}
diff --git a/kopete/libkopete/avdevice/videodevicepool.h b/kopete/libkopete/avdevice/videodevicepool.h
new file mode 100644
index 00000000..1fbdb3e1
--- /dev/null
+++ b/kopete/libkopete/avdevice/videodevicepool.h
@@ -0,0 +1,127 @@
+/*
+ videodevicepool.h - Kopete Multiple Video Device handler Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#ifndef KOPETE_AVVIDEODEVICE_H
+#define KOPETE_AVVIDEODEVICE_H
+
+#include <qvaluevector.h>
+#include <iostream>
+
+
+#include "videoinput.h"
+#include "videodevicemodelpool.h"
+#include <qstring.h>
+#include <qimage.h>
+#include <qvaluevector.h>
+#include <qmutex.h>
+#include <kcombobox.h>
+#include "videodevice.h"
+#include "kopete_export.h"
+#include <kapplication.h>
+#include <kconfig.h>
+#include <kglobal.h>
+
+namespace Kopete {
+
+namespace AV {
+
+/**
+This class allows Kopete to check for the existence of, open, configure, test, set parameters on, grab frames from, and close a given video capture device using the Video4Linux API.
+
+@author Cláudio da Silveira Pinheiro
+*/
+
+typedef QValueVector<Kopete::AV::VideoDevice> VideoDeviceVector;
+
+class VideoDevicePoolPrivate;
+
+class KOPETE_EXPORT VideoDevicePool
+{
+public:
+ static VideoDevicePool* self();
+ int open();
+ int open(unsigned int device);
+ bool isOpen();
+ int getFrame();
+ int width();
+ int minWidth();
+ int maxWidth();
+ int height();
+ int minHeight();
+ int maxHeight();
+ int setSize( int newwidth, int newheight);
+ int close();
+ int startCapturing();
+ int stopCapturing();
+ int readFrame();
+ int getImage(QImage *qimage);
+ int selectInput(int newinput);
+ int setInputParameters();
+ int scanDevices();
+ bool hasDevices();
+ size_t size();
+ ~VideoDevicePool();
+ VideoDeviceVector m_videodevice; // Vector to be filled with found devices
+ VideoDeviceModelPool m_modelvector; // Vector to be filled with unique device models
+ int fillDeviceKComboBox(KComboBox *combobox);
+ int fillInputKComboBox(KComboBox *combobox);
+ int fillStandardKComboBox(KComboBox *combobox);
+ unsigned int currentDevice();
+ int currentInput();
+ unsigned int inputs();
+
+ float getBrightness();
+ float setBrightness(float brightness);
+ float getContrast();
+ float setContrast(float contrast);
+ float getSaturation();
+ float setSaturation(float saturation);
+ float getWhiteness();
+ float setWhiteness(float whiteness);
+ float getHue();
+ float setHue(float hue);
+
+ bool getAutoBrightnessContrast();
+ bool setAutoBrightnessContrast(bool brightnesscontrast);
+ bool getAutoColorCorrection();
+ bool setAutoColorCorrection(bool colorcorrection);
+ bool getImageAsMirror();
+ bool setImageAsMirror(bool imageasmirror);
+
+ void loadConfig(); // Load configuration parameters
+ void saveConfig(); // Save configuration parameters
+
+protected:
+ int xioctl(int request, void *arg);
+ int errnoReturn(const char* s);
+ int showDeviceCapabilities(unsigned int device);
+ void guessDriver();
+ unsigned int m_current_device;
+ struct imagebuffer m_buffer; // only used when no devices were found
+
+ QMutex m_ready;
+private:
+ VideoDevicePool();
+ static VideoDevicePool* s_self;
+ static __u64 m_clients; // Number of instances
+};
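+/*
+ Illustrative usage sketch (error handling trimmed; the 320x240 size is an arbitrary example):
+
+   VideoDevicePool *pool = VideoDevicePool::self();
+   if (pool->open() == EXIT_SUCCESS)
+   {
+       pool->setSize(320, 240);
+       pool->startCapturing();
+       QImage frame;
+       pool->getFrame();
+       pool->getImage(&frame);
+       pool->stopCapturing();
+       pool->close();
+   }
+*/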
+
+}
+
+}
+
+#endif
diff --git a/kopete/libkopete/avdevice/videoinput.cpp b/kopete/libkopete/avdevice/videoinput.cpp
new file mode 100644
index 00000000..5f0f8e58
--- /dev/null
+++ b/kopete/libkopete/avdevice/videoinput.cpp
@@ -0,0 +1,172 @@
+/*
+ videoinput.cpp - Kopete Video Input Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#include "videoinput.h"
+
+namespace Kopete {
+
+namespace AV {
+
+VideoInput::VideoInput()
+{
+ kdDebug() << k_funcinfo << "Executing Video Input's constructor!!!" << endl;
+ m_brightness = 0.5;
+ m_contrast = 0.5;
+ m_saturation = 0.5;
+ m_hue = 0.5;
+ m_autobrightnesscontrast = false;
+ m_autocolorcorrection = false;
+}
+
+
+VideoInput::~VideoInput()
+{
+}
+
+float VideoInput::getBrightness()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_brightness;
+}
+
+float VideoInput::setBrightness(float brightness)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ if ( brightness > 1 )
+ brightness = 1;
+ else
+ if ( brightness < 0 )
+ brightness = 0;
+ m_brightness = brightness;
+ return getBrightness();
+}
+
+float VideoInput::getContrast()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_contrast;
+}
+
+float VideoInput::setContrast(float contrast)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ if ( contrast > 1 )
+ contrast = 1;
+ else
+ if ( contrast < 0 )
+ contrast = 0;
+ m_contrast = contrast;
+ return getContrast();
+}
+
+float VideoInput::getSaturation()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_saturation;
+}
+
+float VideoInput::setSaturation(float saturation)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ if ( saturation > 1 )
+ saturation = 1;
+ else
+ if ( saturation < 0 )
+ saturation = 0;
+ m_saturation = saturation;
+ return getSaturation();
+}
+
+float VideoInput::getWhiteness()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_whiteness;
+}
+
+float VideoInput::setWhiteness(float whiteness)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ if ( whiteness > 1 )
+ whiteness = 1;
+ else
+ if ( whiteness < 0 )
+ whiteness = 0;
+ m_whiteness = whiteness;
+ return getWhiteness();
+}
+
+float VideoInput::getHue()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_hue;
+}
+
+float VideoInput::setHue(float hue)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ if ( hue > 1 )
+ hue = 1;
+ else
+ if ( hue < 0 )
+ hue = 0;
+ m_hue = hue;
+ return getHue();
+}
+
+
+bool VideoInput::getAutoBrightnessContrast()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_autobrightnesscontrast;
+}
+
+bool VideoInput::setAutoBrightnessContrast(bool brightnesscontrast)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ m_autobrightnesscontrast = brightnesscontrast;
+ return getAutoBrightnessContrast();
+}
+
+bool VideoInput::getAutoColorCorrection()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_autocolorcorrection;
+}
+
+bool VideoInput::setAutoColorCorrection(bool colorcorrection)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ m_autocolorcorrection = colorcorrection;
+ return getAutoColorCorrection();
+}
+
+bool VideoInput::getImageAsMirror()
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ return m_imageasmirror;
+}
+
+bool VideoInput::setImageAsMirror(bool imageasmirror)
+{
+// kdDebug() << k_funcinfo << " called." << endl;
+ m_imageasmirror = imageasmirror;
+ return getImageAsMirror();
+}
+
+}
+
+}
diff --git a/kopete/libkopete/avdevice/videoinput.h b/kopete/libkopete/avdevice/videoinput.h
new file mode 100644
index 00000000..3381663e
--- /dev/null
+++ b/kopete/libkopete/avdevice/videoinput.h
@@ -0,0 +1,89 @@
+/*
+ videoinput.h - Kopete Video Input Class
+
+ Copyright (c) 2005-2006 by Cláudio da Silveira Pinheiro <[email protected]>
+
+ Kopete (c) 2002-2003 by the Kopete developers <[email protected]>
+
+ *************************************************************************
+ * *
+ * This library is free software; you can redistribute it and/or *
+ * modify it under the terms of the GNU Lesser General Public *
+ * License as published by the Free Software Foundation; either *
+ * version 2 of the License, or (at your option) any later version. *
+ * *
+ *************************************************************************
+*/
+
+#define ENABLE_AV
+
+#ifndef KOPETE_AVVIDEOINPUT_H
+#define KOPETE_AVVIDEOINPUT_H
+
+#ifdef __linux__
+#include <asm/types.h>
+#undef __STRICT_ANSI__
+#endif // __linux__
+#ifndef __u64 //required by videodev.h
+#define __u64 unsigned long long
+#endif // __u64
+
+#include <qstring.h>
+#include <kdebug.h>
+#include <qvaluevector.h>
+#include "kopete_export.h"
+
+#include "videocontrol.h"
+
+namespace Kopete {
+
+namespace AV {
+
+/**
+@author Kopete Developers
+*/
+class KOPETE_EXPORT VideoInput{
+public:
+ VideoInput();
+ ~VideoInput();
+ QString name;
+ int hastuner;
+ __u64 m_standards;
+
+
+ float getBrightness();
+ float setBrightness(float brightness);
+ float getContrast();
+ float setContrast(float contrast);
+ float getSaturation();
+ float setSaturation(float saturation);
+ float getWhiteness();
+ float setWhiteness(float whiteness);
+ float getHue();
+ float setHue(float hue);
+ bool getAutoBrightnessContrast();
+ bool setAutoBrightnessContrast(bool brightnesscontrast);
+ bool getAutoColorCorrection();
+ bool setAutoColorCorrection(bool colorcorrection);
+ bool getImageAsMirror();
+ bool setImageAsMirror(bool imageasmirror);
+
+protected:
+ QValueVector<VideoControl> m_control;
+ float m_brightness;
+ float m_contrast;
+ float m_saturation;
+ float m_whiteness;
+ float m_hue;
+ bool m_autobrightnesscontrast;
+ bool m_autocolorcorrection;
+ bool m_imageasmirror;
+
+
+};
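+/*
+ Illustrative sketch: every float control setter clamps its argument to the [0, 1] range, e.g.
+
+   VideoInput input;
+   input.setBrightness(1.7);        // stored as 1.0
+   float b = input.getBrightness(); // b == 1.0
+*/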
+
+}
+
+}
+
+#endif