diff --git a/MANIFEST b/MANIFEST index 136c8d9..9bbff92 100644 --- a/MANIFEST +++ b/MANIFEST @@ -3,4 +3,4 @@ capture_picture.py capture_picture_delayed.py list_devices.py setup.py -v4l2capture.c +v4l2capture.cpp diff --git a/README b/README index 26f5501..7efe3be 100644 --- a/README +++ b/README @@ -1,20 +1,17 @@ -python-v4l2capture 1.4.x -Python extension to capture video with video4linux2 +libvideolive +Capture and stream video in python 2009, 2010, 2011 Fredrik Portstrom 2011 Joakim Gebart +2013 Tim Sheerman-Chase -I, the copyright holder of this file, hereby release it into the -public domain. This applies worldwide. In case this is not legally -possible: I grant anyone the right to use this work for any purpose, -without any conditions, unless such conditions are required by law. +This software may be used and redistributed under the terms of the GPL v2 (or later) license. Introduction ============ -python-v4l2capture is a slim and easy to use Python extension for -capturing video with video4linux2. It supports libv4l to convert any -image format to RGB or YUV420. +libvideolive is a slim and easy to use Python extension for +capturing and streaming video. this fork of python-v4l2capture: https://github.com/gebart/python-v4l2capture @@ -22,36 +19,38 @@ original python-v4l2capture: http://fredrik.jemla.eu/v4l2capture libv4l: http://freshmeat.net/projects/libv4l -Installation -============ +Installation on Linux +===================== + +libvideolive on linux requires libv4l by default. You can do so by erasing ', libraries = ["v4l2"]' +in setup.py and erasing '#define USE_LIBV4L' in v4l2capture.cpp. + +libvideolive uses distutils to compile. For Linux: + +./setup.py build +sudo ./setup.py install + +Installation on Windows +===================== -v4l2capture requires libv4l by default. You can compile v4l2capture -without libv4l, but that reduces image format support to YUYV input -and RGB output only. 
You can do so by erasing ', libraries = ["v4l2"]' -in setup.py and erasing '#define USE_LIBV4L' in v4l2capture.c. +Modify setup.py to specify locations of libjpeg and pthread external libraries. -python-v4l2capture uses distutils. -To build: ./setup.py build -To build and install: ./setup.py install +With Visual Studio 2010: + +SET VS90COMNTOOLS=%VS100COMNTOOLS% +python setup.py build -c msvc +python setup.py install + +Remember to put the libjpeg and pthread dlls somewhere appropriate. Example ======= -See capture_picture.py, capture_picture_delayed.py and list_devices.py. +To do Change log ========== (see git log for latest changes) -1.4 (2011-03-18) - Added support for YUV420 output. - -1.3 (2010-07-21) - Added set of capabilities to the return value of - get_info. Updated list_devices.py. - -1.2 (2010-04-01) - Forked example script into capture_picture.py and - capture_picture_delayed.py. - -1.1 (2009-11-03) - Updated URL and documentation. -1.0 (2009-02-28) - Initial release. diff --git a/base.h b/base.h new file mode 100644 index 0000000..28929f4 --- /dev/null +++ b/base.h @@ -0,0 +1,91 @@ + +#ifndef BASE_H +#define BASE_H + +#include + +class FrameMetaData +{ +public: + std::string fmt; + int width; + int height; + unsigned buffLen; + unsigned long sequence; + unsigned long tv_sec; + unsigned long tv_usec; + + FrameMetaData() + { + width = 0; + height = 0; + buffLen = 0; + sequence = 0; + tv_sec = 0; + tv_usec = 0; + } + + FrameMetaData(const FrameMetaData &in) + { + FrameMetaData::operator=(in); + } + + const FrameMetaData &operator=(const FrameMetaData &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + buffLen = in.buffLen; + sequence = in.sequence; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; + return *this; + } + +}; + +class Base_Video_In +{ +public: + Base_Video_In() {}; + virtual ~Base_Video_In() {}; + + virtual void Stop() {}; + virtual void WaitForStop() {}; + virtual void OpenDevice() {}; + virtual void SetFormat(const char *fmt, 
int width, int height) {}; + virtual void StartDevice(int buffer_count) {}; + virtual void StopDevice() {}; + virtual void CloseDevice() {}; + virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;}; + + void Run() {}; +}; + +// ********************************************************************** + +class Base_Video_Out +{ +public: + Base_Video_Out() {}; + virtual ~Base_Video_Out() {}; + + virtual void SendFrame(const char *imgIn, + unsigned imgLen, + const char *pxFmt, + int width, + int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0) {}; + virtual void Stop() {}; + virtual int WaitForStop() {return 1;}; + virtual void SetOutputSize(int width, int height) {}; + virtual void SetOutputPxFmt(const char *fmt) {}; + virtual void SetFrameRate(unsigned int frameRateIn) {}; + virtual void SetVideoCodec(const char *codec, unsigned int bitrate) {}; + + void Run() {}; +}; + +#endif //BASE_H + diff --git a/capture_picture.py b/capture_picture.py deleted file mode 100755 index 0fec3bb..0000000 --- a/capture_picture.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/python -# -# python-v4l2capture -# -# This file is an example on how to capture a picture with -# python-v4l2capture. -# -# 2009, 2010 Fredrik Portstrom -# -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. - -import Image -import select -import v4l2capture - -# Open the video device. -video = v4l2capture.Video_device("/dev/video0") - -# Suggest an image size to the device. The device may choose and -# return another size if it doesn't support the suggested one. -size_x, size_y = video.set_format(1280, 1024) - -# Create a buffer to store image data in. This must be done before -# calling 'start' if v4l2capture is compiled with libv4l2. 
Otherwise -# raises IOError. -video.create_buffers(1) - -# Send the buffer to the device. Some devices require this to be done -# before calling 'start'. -video.queue_all_buffers() - -# Start the device. This lights the LED if it's a camera that has one. -video.start() - -# Wait for the device to fill the buffer. -select.select((video,), (), ()) - -# The rest is easy :-) -image_data = video.read() -video.close() -image = Image.fromstring("RGB", (size_x, size_y), image_data) -image.save("image.jpg") -print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")" diff --git a/capture_picture_delayed.py b/capture_picture_delayed.py deleted file mode 100755 index 9597d7b..0000000 --- a/capture_picture_delayed.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/python -# -# python-v4l2capture -# -# This file is an example on how to capture a picture with -# python-v4l2capture. It waits between starting the video device and -# capturing the picture, to get a good picture from cameras that -# require a delay to get enough brightness. It does not work with some -# devices that require starting to capture pictures immediatly when -# the device is started. -# -# 2009, 2010 Fredrik Portstrom -# -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. - -import Image -import select -import time -import v4l2capture - -# Open the video device. -video = v4l2capture.Video_device("/dev/video0") - -# Suggest an image size to the device. The device may choose and -# return another size if it doesn't support the suggested one. -size_x, size_y = video.set_format(1280, 1024) - -# Create a buffer to store image data in. This must be done before -# calling 'start' if v4l2capture is compiled with libv4l2. Otherwise -# raises IOError. 
-video.create_buffers(1) - -# Start the device. This lights the LED if it's a camera that has one. -video.start() - -# Wait a little. Some cameras take a few seconds to get bright enough. -time.sleep(2) - -# Send the buffer to the device. -video.queue_all_buffers() - -# Wait for the device to fill the buffer. -select.select((video,), (), ()) - -# The rest is easy :-) -image_data = video.read() -video.close() -image = Image.fromstring("RGB", (size_x, size_y), image_data) -image.save("image.jpg") -print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")" diff --git a/gpl-2.0.txt b/gpl-2.0.txt new file mode 100644 index 0000000..d159169 --- /dev/null +++ b/gpl-2.0.txt @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. 
+ + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. 
You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. 
+ +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. 
However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. 
+You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. 
If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. 
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. 
diff --git a/libvideolive.cpp b/libvideolive.cpp new file mode 100644 index 0000000..aa88c91 --- /dev/null +++ b/libvideolive.cpp @@ -0,0 +1,312 @@ +// libvideolive +// Python extension to capture and stream video +// +// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +// 2011, Joakim Gebart +// 2013, Tim Sheerman-Chase +// See README for license + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "pixfmt.h" +#include "videoout.h" +#include "videoin.h" +#include "videooutfile.h" +#include "pixfmt.h" + +// ********************************************************************* + +PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +{ + /* This converts an MJPEG frame into a standard JPEG binary + MJPEG images omit the huffman table if the standard table + is used. If it is missing, this function adds the table + into the file structure. */ + + if(PyTuple_Size(args) < 1) + { + PyErr_SetString(PyExc_TypeError, "Function requires 1 argument"); + return NULL; + } + + PyObject *inBuffer = PyTuple_GetItem(args, 0); + + if(!PyString_Check(inBuffer)) + { + PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); + //PyObject* type = PyObject_Type(inBuffer); + //PyObject_Print(type, stdout, Py_PRINT_RAW); + //Py_CLEAR(type); + + return NULL; + } + + unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); + Py_ssize_t inBufferLen = PyString_Size(inBuffer); + std::string outBuffer; + + InsertHuffmanTableCTypes((unsigned char*)inBufferPtr, inBufferLen, outBuffer); + + PyObject *outBufferPy = PyString_FromStringAndSize(outBuffer.c_str(), outBuffer.length()); + + return outBufferPy; +} + +PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) +{ + if (PyErr_Occurred() != NULL) + throw std::runtime_error("Python error set with unexpected state."); + + //0 string src pixFormat + //1 int src width + //2 int src height + //3 ByteArray src data + //4 string out 
pixFormat + //5 int out width + //6 int out height + //7 ByteArray out data + + if(PyTuple_Size(args) < 8) + { + PyErr_SetString(PyExc_TypeError, "Function requires 8 arguments (and 1 optional)"); + return NULL; + } + + //Input image + PyObject *inPixFmt = PyTuple_GetItem(args, 0); + if(!PyString_Check(inPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); return NULL;} + PyObject *inWidth = PyTuple_GetItem(args, 1); + if(!PyInt_Check(inWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 2 must be an int."); return NULL;} + PyObject *inHeight = PyTuple_GetItem(args, 2); + if(!PyInt_Check(inHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 3 must be an int."); return NULL;} + PyObject *inData = PyTuple_GetItem(args, 3); + if(!PyByteArray_Check(inData)) {PyErr_SetString(PyExc_TypeError, "Argument 4 must be a byte array."); return NULL;} + + //Output image + PyObject *outPixFmt = PyTuple_GetItem(args, 4); + if(!PyString_Check(outPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 5 must be a string."); return NULL;} + PyObject *outWidth = PyTuple_GetItem(args, 5); + if(!PyInt_Check(outWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 6 must be an int."); return NULL;} + PyObject *outHeight = PyTuple_GetItem(args, 6); + if(!PyInt_Check(outHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 7 must be an int."); return NULL;} + PyObject *outData = PyTuple_GetItem(args, 7); + if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); return NULL;} + + //Optional arguments + PyObject *metaOut = NULL; + if(PyTuple_Size(args) >= 9) + { + metaOut = PyTuple_GetItem(args, 8); + if(!PyDict_Check(metaOut) && metaOut != Py_None) + { + PyErr_SetString(PyExc_TypeError, "Argument 9 (if set) must be a dict or None."); + return NULL; + } + if(metaOut==Py_None) + metaOut = NULL; + } + + unsigned char *buffOut = NULL; + unsigned buffOutLen = 0; + int useExistingBuff = 0; + if(PyByteArray_Size(outData) > 0) + { + 
buffOut = (unsigned char *)PyString_AsString(outData); + buffOutLen = PyByteArray_Size(outData); + useExistingBuff = 1; + } + + int ret = 0; + try + { + int outWidthInt = PyInt_AsLong(outWidth); + int outHeightInt = PyInt_AsLong(outHeight); + + unsigned char *inDataC = (unsigned char*)PyByteArray_AsString(inData); + long inDataLen = PyByteArray_Size(inData); + char *inPixFmtC = PyString_AsString(inPixFmt); + char *outPixFmtC = PyString_AsString(outPixFmt); + + ret = DecodeAndResizeFrame(inDataC, + inDataLen, + inPixFmtC, + PyInt_AsLong(inWidth), PyInt_AsLong(inHeight), + outPixFmtC, + &buffOut, + &buffOutLen, + outWidthInt, + outHeightInt); + + if(metaOut!=NULL && ret > 0) + { + PyDict_SetItemString(metaOut, "width", PyInt_FromLong(outWidthInt)); + PyDict_SetItemString(metaOut, "height", PyInt_FromLong(outHeightInt)); + PyDict_SetItemString(metaOut, "format", PyString_FromString(outPixFmtC)); + } + + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + if(!useExistingBuff && ret > 0) + { + PyByteArray_Resize(outData, buffOutLen); + memcpy(PyByteArray_AsString(outData), buffOut, buffOutLen); + delete [] buffOut; + } + + if (PyErr_Occurred() != NULL) + throw std::runtime_error("Python error set with unexpected state."); + + return PyInt_FromLong(ret); +} + + +// ********************************************************************* + +static PyMethodDef Device_manager_methods[] = { + {"open", (PyCFunction)Device_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0')\n\n" + "Open video capture."}, + {"set_format", (PyCFunction)Device_manager_set_format, METH_VARARGS, + "set_format(dev, size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" + "Request the video device to set image size and format. The device may " + "choose another size than requested and will return its choice. 
The " + "pixel format may be either RGB24, YUV420 or MJPEG."}, + {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, + "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" + "Start video capture."}, + {"get_frame", (PyCFunction)Device_manager_Get_frame, METH_VARARGS, + "start(dev = '\\dev\\video0'\n\n" + "Get video frame."}, + {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, + "stop(dev = '\\dev\\video0')\n\n" + "Stop video capture."}, + {"close", (PyCFunction)Device_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. Subsequent calls to other methods will fail."}, + {"list_devices", (PyCFunction)Device_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, + {NULL} +}; + +static PyTypeObject Device_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, + (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Device_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Device_manager_init +}; + +static PyMethodDef Video_out_manager_methods[] = { + {"open", (PyCFunction)Video_out_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0', pixel_format, width, height)\n\n" + "Open video output."}, + {"send_frame", (PyCFunction)Video_out_manager_Send_frame, METH_VARARGS, + "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" + "Send frame to video stream output."}, + {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. 
Subsequent calls to other methods will fail."}, + {"list_devices", (PyCFunction)Video_out_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, + {NULL} +}; + +static PyTypeObject Video_out_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_stream_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_manager_init +}; + +static PyMethodDef Video_out_file_manager_methods[] = { + {"open", (PyCFunction)Video_out_file_manager_open, METH_VARARGS, + "open(filename = 'out.wmv', width, height)\n\n" + "Open video output."}, + {"send_frame", (PyCFunction)Video_out_file_manager_Send_frame, METH_VARARGS, + "send_frame(filename = 'out.wmv', img, pixel_format, width, height, timestamp=None)\n\n" + "Send frame to video stream output."}, + {"close", (PyCFunction)Video_out_file_manager_close, METH_VARARGS, + "close(filename = 'out.wmv')\n\n" + "Close video device. 
Subsequent calls to other methods will fail."}, + {"set_frame_rate", (PyCFunction)Video_out_file_manager_Set_Frame_Rate, METH_VARARGS, + "set_frame_rate(filename = 'out.wmv', frame_rate)\n\n" + "Set output frame rate."}, + {"set_video_codec", (PyCFunction)Video_out_file_manager_Set_Video_Codec, METH_VARARGS, + "set_video_codec(filename = 'out.wmv', codec = 'H264', bitrate)\n\n" + "Set output video codec."}, + {NULL} +}; + +static PyTypeObject Video_out_file_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_file_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_file_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_file_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_file_manager_init +}; + +// ********************************************************************* + +static PyMethodDef module_methods[] = { + { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, + { "DecodeAndResizeFrame", (PyCFunction)DecodeAndResizeFrameHighLevel, METH_VARARGS, NULL }, + { NULL, NULL, 0, NULL } +}; + +PyMODINIT_FUNC initvideolive(void) +{ + Device_manager_type.tp_new = PyType_GenericNew; + Video_out_manager_type.tp_new = PyType_GenericNew; + Video_out_file_manager_type.tp_new = PyType_GenericNew; + + if(PyType_Ready(&Device_manager_type) < 0) + { + return; + } + if(PyType_Ready(&Video_out_manager_type) < 0) + { + return; + } + if(PyType_Ready(&Video_out_file_manager_type) < 0) + { + return; + } + + PyObject *module = Py_InitModule3("videolive", module_methods, + "Capture and stream video."); + + if(!module) + { + return; + } + + Py_INCREF(&Device_manager_type); + PyModule_AddObject(module, "Video_in_stream_manager", (PyObject *)&Device_manager_type); + 
PyModule_AddObject(module, "Video_out_stream_manager", (PyObject *)&Video_out_manager_type); + PyModule_AddObject(module, "Video_out_file_manager", (PyObject *)&Video_out_file_manager_type); + +} diff --git a/list_devices.py b/list_devices.py deleted file mode 100755 index 294d17d..0000000 --- a/list_devices.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/python -# -# python-v4l2capture -# -# 2009, 2010 Fredrik Portstrom -# -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. - -import os -import v4l2capture -file_names = [x for x in os.listdir("/dev") if x.startswith("video")] -file_names.sort() -for file_name in file_names: - path = "/dev/" + file_name - print path - try: - video = v4l2capture.Video_device(path) - driver, card, bus_info, capabilities = video.get_info() - print " driver: %s\n card: %s" \ - "\n bus info: %s\n capabilities: %s" % ( - driver, card, bus_info, ", ".join(capabilities)) - video.close() - except IOError, e: - print " " + str(e) diff --git a/mfvideoin.cpp b/mfvideoin.cpp new file mode 100644 index 0000000..f26ee2a --- /dev/null +++ b/mfvideoin.cpp @@ -0,0 +1,1259 @@ + +#include +#include +#include +#include +#include +using namespace std; + +#include +#include +#include +#include + +#include "mfvideoin.h" +#include "pixfmt.h" + +//See also: +//https://github.com/Itseez/opencv/blob/master/modules/highgui/src/cap_msmf.cpp + +#define MAX_DEVICE_ID_LEN 100 +int EnumDevices(IMFActivate ***ppDevicesOut); + +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + +void PrintGuid(GUID guid) +{ + LPOLESTR lplpsz; + StringFromCLSID(guid, &lplpsz); + wcout << lplpsz << endl; + CoTaskMemFree(lplpsz); +} + +#ifndef IF_EQUAL_RETURN +#define IF_EQUAL_RETURN(param, val) if(val == param) 
return L#val +#endif + +LPCWSTR GetGUIDNameConst(const GUID& guid) +{ + //http://msdn.microsoft.com/en-us/library/windows/desktop/ee663602%28v=vs.85%29.aspx + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE); + IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT); + IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES); + IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED); + IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX); + IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN); + IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO); + IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING); + IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE); + IF_EQUAL_RETURN(guid, 
MF_MT_TRANSFER_FUNCTION); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE); + IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE); + IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING); + IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE); + IF_EQUAL_RETURN(guid, MF_MT_PALETTE); + IF_EQUAL_RETURN(guid, MF_MT_USER_DATA); + IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT); + IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG); + + //IF_EQUAL_RETURN(guid, FORMAT_VideoInfo); //Dshow dependent + //IF_EQUAL_RETURN(guid, FORMAT_VideoInfo2); + + // Media types + + IF_EQUAL_RETURN(guid, MFMediaType_Audio); + IF_EQUAL_RETURN(guid, MFMediaType_Video); + IF_EQUAL_RETURN(guid, MFMediaType_Protected); + 
IF_EQUAL_RETURN(guid, MFMediaType_SAMI); + IF_EQUAL_RETURN(guid, MFMediaType_Script); + IF_EQUAL_RETURN(guid, MFMediaType_Image); + IF_EQUAL_RETURN(guid, MFMediaType_HTML); + IF_EQUAL_RETURN(guid, MFMediaType_Binary); + IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer); + + IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44') + IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl') + IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264') + IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420') + IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2') + IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG); + IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V') + IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010') + IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016') + IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210') + IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216') + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5 + IF_EQUAL_RETURN(guid, 
MFVideoFormat_RGB565); // D3DFMT_R5G6B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8); + IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY') + IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210') + IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3') + IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P); + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T); + IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2') + IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU); + + IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM + IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT + IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS + IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // WAVE_FORMAT_DOLBY_AC3_SPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS + IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9 + IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3 + IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG + IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC + IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // 
WAVE_FORMAT_MPEG_ADTS_AAC + + return NULL; +} + +HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride) +{ + LONG lStride = 0; + + // Try to get the default stride from the media type. + HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride); + + if (FAILED(hr)) + { + // Attribute not set. Try to calculate the default stride. + + GUID subtype = GUID_NULL; + + UINT32 width = 0; + UINT32 height = 0; + // Get the subtype and the image size. + hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype); + if (FAILED(hr)) + { + goto done; + } + hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); + if (FAILED(hr)) + { + goto done; + } + hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride); + if (FAILED(hr)) + { + goto done; + } + + // Set the attribute for later reference. + (void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride)); + } + + if (SUCCEEDED(hr)) + { + *plStride = lStride; + } + +done: + return hr; +} + +DWORD SampleToStaticObj(IMFSample *pSample, char **buff) +{ + if(*buff!=NULL) + throw runtime_error("Buff ptr should be initially null"); + IMFMediaBuffer *ppBuffer = NULL; + HRESULT hr = pSample->ConvertToContiguousBuffer(&ppBuffer); + //cout << "ConvertToContiguousBuffer=" << SUCCEEDED(hr) << "\tstride="<< plStride << "\n"; + + IMF2DBuffer *m_p2DBuffer = NULL; + ppBuffer->QueryInterface(IID_IMF2DBuffer, (void**)&m_p2DBuffer); + //cout << "IMF2DBuffer=" << (m_p2DBuffer != NULL) << "\n"; + + DWORD pcbCurrentLength = 0; + BYTE *ppbBuffer = NULL; + DWORD pcbMaxLength = 0; + + if(SUCCEEDED(hr)) + { + + hr = ppBuffer->Lock(&ppbBuffer, &pcbMaxLength, &pcbCurrentLength); + //cout << "pcbMaxLength="<< pcbMaxLength << "\tpcbCurrentLength=" <Unlock(); + } + + if(ppBuffer) ppBuffer->Release(); + return pcbCurrentLength; +} + +class SourceReaderCB : public IMFSourceReaderCallback +{ + //http://msdn.microsoft.com/en-us/library/windows/desktop/gg583871%28v=vs.85%29.aspx +public: + LONG volatile m_nRefCount; + CRITICAL_SECTION 
lock; + int framePending; + unsigned int maxNumFrames; + unsigned int droppedFrames; + + vector frameBuff; + vector frameLenBuff; + vector hrStatusBuff; + vector dwStreamIndexBuff; + vector dwStreamFlagsBuff; + vector llTimestampBuff; + + SourceReaderCB() + { + m_nRefCount = 0; + framePending = 0; + InitializeCriticalSection(&lock); + maxNumFrames = 1; + droppedFrames = 0; + } + + virtual ~SourceReaderCB() + { + DeleteCriticalSection(&lock); + for(unsigned int i=0; iframeBuff.size(); i++) + delete [] this->frameBuff[i]; + } + + STDMETHODIMP QueryInterface(REFIID iid, void** ppv) + { + static const QITAB qit[] = + { + QITABENT(SourceReaderCB, IMFSourceReaderCallback), + { 0 }, + }; + return QISearch(this, qit, iid, ppv); + } + + void CheckForBufferOverflow() + { + //The lock should already be in use + while(this->frameBuff.size() > this->maxNumFrames) + { + //Drop an old frame if buffer is starting to overflow + char *frameToDrop = frameBuff[0]; + delete [] frameToDrop; + frameToDrop = NULL; + frameBuff.erase(frameBuff.begin()); + frameLenBuff.erase(frameLenBuff.begin()); + hrStatusBuff.erase(hrStatusBuff.begin()); + dwStreamIndexBuff.erase(dwStreamIndexBuff.begin()); + dwStreamFlagsBuff.erase(dwStreamFlagsBuff.begin()); + llTimestampBuff.erase(llTimestampBuff.begin()); + droppedFrames ++; + } + } + + void SetMaxBufferSize(unsigned maxBuffSizeIn) + { + EnterCriticalSection(&lock); + this->maxNumFrames = maxBuffSizeIn; + this->CheckForBufferOverflow(); + LeaveCriticalSection(&lock); + } + + STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, + DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) + { + //cout << "OnReadSample: " << llTimestamp << endl; + EnterCriticalSection(&lock); + + if (pSample) + { + char *buff = NULL; + DWORD buffLen = SampleToStaticObj(pSample, &buff); + //cout << (long) buff << "," << buffLen << endl; + //if(buff!=NULL) delete [] buff; + + //Always add frame to buffer + frameBuff.push_back(buff); + 
frameLenBuff.push_back(buffLen); + hrStatusBuff.push_back(hrStatus); + dwStreamIndexBuff.push_back(dwStreamIndex); + dwStreamFlagsBuff.push_back(dwStreamFlags); + llTimestampBuff.push_back(llTimestamp); + + this->CheckForBufferOverflow(); + } + + this->framePending = 0; + LeaveCriticalSection(&lock); + + return S_OK; + } + + STDMETHODIMP_(ULONG) AddRef() + { + return InterlockedIncrement(&m_nRefCount); + } + + STDMETHODIMP_(ULONG) Release() + { + ULONG uCount = InterlockedDecrement(&m_nRefCount); + if (uCount == 0) + { + //cout << "self destruct" << endl; + delete this; + } + return uCount; + } + + STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) + { + return S_OK; + } + + STDMETHODIMP OnFlush(DWORD) + { + return S_OK; + } + + void SetPending() + { + EnterCriticalSection(&lock); + this->framePending = 1; + LeaveCriticalSection(&lock); + } + + int GetPending() + { + EnterCriticalSection(&lock); + int pendingCopy = this->framePending; + LeaveCriticalSection(&lock); + return pendingCopy; + } + + void WaitForFrame() + { + while(1) + { + EnterCriticalSection(&lock); + int pendingCopy = this->framePending; + LeaveCriticalSection(&lock); + if (!pendingCopy) return; + Sleep(10); + } + } + + int GetFrame(HRESULT *hrStatus, DWORD *dwStreamIndex, + DWORD *dwStreamFlags, LONGLONG *llTimestamp, char **frame, DWORD *buffLen) + { + int ret = 0; + *hrStatus = S_OK; + *dwStreamIndex = 0; + *dwStreamFlags = 0; + *llTimestamp = 0; + *frame = NULL; + *buffLen = 0; + + EnterCriticalSection(&lock); + if(this->frameBuff.size()>0) + { + *frame = frameBuff[0]; + *buffLen = frameLenBuff[0]; + *hrStatus = hrStatusBuff[0]; + *dwStreamIndex = dwStreamIndexBuff[0]; + *dwStreamFlags = dwStreamFlagsBuff[0]; + *llTimestamp = llTimestampBuff[0]; + + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + 
this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + ret = 1; + } + + LeaveCriticalSection(&lock); + return ret; + } + +}; +//************************************************************************** + +WmfBase::WmfBase() : Base_Video_In() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); +} + +WmfBase::~WmfBase() +{ + MFShutdown(); + + CoUninitialize(); +} + + +//*************************************************************************** + +MfVideoIn::MfVideoIn(const wchar_t *devNameIn) : WmfBase() +{ + this->asyncMode = 1; + this->devName = devNameIn; + this->reader = NULL; + this->source = NULL; + this->readerCallback = NULL; + this->stopping = 0; + this->stopped = 0; + + this->openDevFlag = 0; + this->startDevFlag = 0; + this->stopDevFlag = 0; + this->closeDevFlag = 0; + this->maxBuffSize = 1; + InitializeCriticalSection(&lock); +} + +MfVideoIn::~MfVideoIn() +{ + this->WaitForStop(); + + SafeRelease(&reader); + SafeRelease(&source); + DeleteCriticalSection(&lock); +} + +void MfVideoIn::Stop() +{ + EnterCriticalSection(&lock); + this->stopping = 1; + LeaveCriticalSection(&lock); +} + +void MfVideoIn::WaitForStop() +{ + this->Stop(); + + int waiting = 1; + while(waiting) + { + EnterCriticalSection(&lock); + waiting = !this->stopped; + LeaveCriticalSection(&lock); + Sleep(10); + } +} + +void MfVideoIn::OpenDevice() +{ + cout << "MfVideoIn::OpenDevice()" << endl; + EnterCriticalSection(&lock); + this->openDevFlag = 1; + LeaveCriticalSection(&lock); +} + +void MfVideoIn::SetFormat(const char *fmt, int width, int height) +{ + +} + +void MfVideoIn::StartDevice(int buffer_count) +{ + cout << "MfVideoIn::StartDevice()" << endl; + EnterCriticalSection(&lock); + this->startDevFlag = 1; 
+ LeaveCriticalSection(&lock); +} + +void MfVideoIn::StopDevice() +{ + EnterCriticalSection(&lock); + this->stopDevFlag = 1; + LeaveCriticalSection(&lock); +} + +void MfVideoIn::CloseDevice() +{ + cout << "MfVideoIn::CloseDevice()" << endl; + EnterCriticalSection(&lock); + this->closeDevFlag = 1; + LeaveCriticalSection(&lock); +} + +int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) +{ + if(buffOut==NULL) + throw runtime_error("Buffer ptr cannot be null"); + if(metaOut==NULL) + throw runtime_error("Meta data pointer cannot be null"); + + EnterCriticalSection(&lock); + + if(this->frameBuff.size() == 0) + { + LeaveCriticalSection(&lock); + return 0; + } + + unsigned char* currentBuff = (unsigned char *)this->frameBuff[0]; + std::string currentPixFmt = "Unknown"; + unsigned currentBuffLen = this->frameLenBuff[0]; + + //wcout << this->majorTypeBuff[0] << "," << this->subTypeBuff[0] << endl; + + if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) + currentPixFmt = "YUYV"; //YUYV = YUY2 + if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_RGB24")==0) + currentPixFmt = "RGB24INV"; + + //Do conversion to rgb + unsigned char *buffConv = NULL; + unsigned buffConvLen = 0; + int widthTmp = this->widthBuff[0]; + int heightTmp = this->heightBuff[0]; + int ok = DecodeFrame(currentBuff, currentBuffLen, + currentPixFmt.c_str(), + widthTmp, heightTmp, + "RGB24", + &buffConv, + &buffConvLen); + + if(ok) + { + delete [] currentBuff; //Now unneeded + currentBuff = buffConv; + currentPixFmt = "RGB24"; + currentBuffLen = buffConvLen; + } + else + { + cout << "Cannot convert from pix format "; + wcout << this->subTypeBuff[0] << endl; + } + + *buffOut = currentBuff; + metaOut->fmt = currentPixFmt; + metaOut->width = this->widthBuff[0]; + metaOut->height = this->heightBuff[0]; + metaOut->buffLen = currentBuffLen; + metaOut->sequence = 0; + metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units + 
metaOut->tv_usec = (unsigned long)((this->llTimestampBuff[0] - metaOut->tv_sec * 1e7) / 10); + + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + this->PopFrontMetaDataBuff(); + + LeaveCriticalSection(&lock); + + return 1; +} + +//*************************************************************** + +void MfVideoIn::Run() +{ + int running = 1; + try + { + while(running) + { + EnterCriticalSection(&lock); + running = !this->stopping || this->stopDevFlag || this->closeDevFlag; + int openDevFlagTmp = this->openDevFlag; + this->openDevFlag = 0; + int startDevFlagTmp = this->startDevFlag; + this->startDevFlag = 0; + int stopDevFlagTmp = this->stopDevFlag; + this->stopDevFlag = 0; + int closeDevFlagTmp = this->closeDevFlag; + this->closeDevFlag = 0; + LeaveCriticalSection(&lock); + if(!running) continue; + + if(openDevFlagTmp) + this->OpenDeviceInternal(); + + if(startDevFlagTmp) + this->StartDeviceInternal(); + + if(this->reader != NULL) + this->ReadFramesInternal(); + + if(stopDevFlagTmp) + this->StopDeviceInternal(); + + if(closeDevFlagTmp) + this->CloseDeviceInternal(); + + Sleep(10); + } + } + catch(std::exception &err) + { + cout << err.what() << endl; + } + + EnterCriticalSection(&lock); + this->stopped = 1; + LeaveCriticalSection(&lock); +} + +void MfVideoIn::OpenDeviceInternal() +{ + //Check if source is already available + if(this->source != NULL) + throw runtime_error("Device already open"); + + //Open a new source + IMFActivate **ppDevices = NULL; + int count = EnumDevices(&ppDevices); + int devIndex = -1; + + //Find device + for(int i=0; iGetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, + &symbolicLink, + NULL + ); + 
if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + throw std::runtime_error("GetAllocatedString failed"); + } + + if(wcscmp(symbolicLink, this->devName.c_str())==0) + { + devIndex = i; + } + CoTaskMemFree(symbolicLink); + } + + if(devIndex == -1) + throw runtime_error("Device not found"); + + IMFActivate *pActivate = ppDevices[devIndex]; + + //Activate device object + IMFMediaSource *sourceTmp = NULL; + HRESULT hr = pActivate->ActivateObject( + __uuidof(IMFMediaSource), + (void**)&sourceTmp + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + throw std::runtime_error("ActivateObject failed"); + } + + this->source = sourceTmp; + + SafeRelease(ppDevices); +} + +void MfVideoIn::StartDeviceInternal() +{ + //Create reader + IMFAttributes *pAttributes = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 1); + if(!SUCCEEDED(hr)) + throw std::runtime_error("MFCreateAttributes failed"); + + if(source==NULL) + throw std::runtime_error("Source not open"); + + //Set attributes for reader + if(this->asyncMode) + { + this->readerCallback = new SourceReaderCB(); + + hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this->readerCallback); + } + + IMFSourceReader *readerTmp = NULL; + hr = MFCreateSourceReaderFromMediaSource(this->source, pAttributes, &readerTmp); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("MFCreateSourceReaderFromMediaSource failed"); + } + + this->reader = readerTmp; + + //this->GetMfControl(CameraControl_Exposure); + //this->SetMfControl(CameraControl_Exposure, -3, 1); + //this->GetMfControl(CameraControl_Exposure, 1); + + /*try + { + int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); + std::cout << "ret" << ret << std::endl; + } + catch(std::runtime_error &err) + { + std::cout << "exception " << err.what() << std::endl; + }*/ + + SafeRelease(&pAttributes); +} + +int MfVideoIn::GetMfControl(long prop, int range) +{ + long Min = 0; + long Max = 0; + long Step = 0; + long 
Default = 0; + long Flag = 0; + + IAMCameraControl *pProcControl = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcControl->GetRange(prop, &Min, &Max, &Step, &Default, &Flag); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pProcControl); + return 0; + } + + if(range) + { + std::cout << "Min " << Min << std::endl; + std::cout << "Max " << Max << std::endl; + std::cout << "Step " << Step << std::endl; + std::cout << "Default " << Default << std::endl; + std::cout << "Allowed Flag " << Flag << std::endl; + } + + long val = 0, flags = 0; + hr = pProcControl->Get(prop, &val, &flags); + + std::cout << "Current Value " << prop << " " << val << std::endl; + std::cout << "Current Flag " << prop << " " << flags << std::endl; + + SafeRelease(&pProcControl); + return SUCCEEDED(hr); +} + +int MfVideoIn::SetMfControl(long prop, long value, long flags) +{ + if(flags==0) + flags = CameraControl_Flags_Manual; + + IAMCameraControl *pProcControl = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcControl->Set(prop, value, flags); + + SafeRelease(&pProcControl); + return SUCCEEDED(hr); + +} + +int MfVideoIn::GetMfParameter(long param, int range) +{ + long Min = 0; + long Max = 0; + long Step = 0; + long Default = 0; + long Flag = 0; + + IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcAmp->GetRange(param, &Min, &Max, &Step, &Default, &Flag); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pProcAmp); + return 0; + } + + if(range) + { + std::cout << "param "<< param << " Min " << Min << std::endl; + std::cout << "param "<< param << " Max " << Max << std::endl; + std::cout << "param "<< 
param << " Step " << Step << std::endl; + std::cout << "param "<< param << " Default " << Default << std::endl; + std::cout << "param "<< param << " Allowed Flag " << Flag << std::endl; + } + + long val = 0, flags = 0; + hr = pProcAmp->Get(param, &val, &flags); + + std::cout << "param "<< param << " Current Value " << val << std::endl; + std::cout << "param "<< param << " Current Flag " << flags << std::endl; + + SafeRelease(&pProcAmp); + return SUCCEEDED(hr); +} + +int MfVideoIn::SetMfParameter(long param, long value, long flags) +{ + if(flags==0) + flags = CameraControl_Flags_Manual; + + IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcAmp->Set(param, value, flags); + + SafeRelease(&pProcAmp); + return SUCCEEDED(hr); + +} + +void MfVideoIn::SetSampleMetaData(DWORD streamIndex) +{ + //Set meta data in output object + IMFMediaType *pCurrentType = NULL; + LONG plStride = 0; + GUID majorType=GUID_NULL, subType=GUID_NULL; + UINT32 width = 0; + UINT32 height = 0; + + HRESULT hr = this->reader->GetCurrentMediaType(streamIndex, &pCurrentType); + if(!SUCCEEDED(hr)) cout << "Error 3\n"; + BOOL isComp = FALSE; + hr = pCurrentType->IsCompressedFormat(&isComp); + hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType); + LPCWSTR typePtr = GetGUIDNameConst(majorType); + if(!SUCCEEDED(hr)) cout << "Error 4\n"; + hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType); + if(!SUCCEEDED(hr)) cout << "Error 5\n"; + int isVideo = (majorType==MFMediaType_Video); + if(isVideo) + { + GetDefaultStride(pCurrentType, &plStride); + hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height); + if(!SUCCEEDED(hr)) cout << "Error 20\n"; + } + + LPCWSTR subTypePtr = GetGUIDNameConst(subType); + + this->plStrideBuff.push_back(plStride); + this->majorTypeBuff.push_back(typePtr); + this->subTypeBuff.push_back(subTypePtr); + 
this->widthBuff.push_back(width); + this->heightBuff.push_back(height); + this->isCompressedBuff.push_back(isComp); + + SafeRelease(&pCurrentType); +} + +void MfVideoIn::PopFrontMetaDataBuff() + +{ + if(this->plStrideBuff.size()>0) this->plStrideBuff.erase(this->plStrideBuff.begin()); + if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + this->subTypeBuff.erase(this->subTypeBuff.begin()); + this->widthBuff.erase(this->widthBuff.begin()); + this->heightBuff.erase(this->heightBuff.begin()); + this->isCompressedBuff.erase(this->isCompressedBuff.begin()); +} + +void MfVideoIn::ReadFramesInternal() +{ + //Check if reader is ready + if(this->reader == NULL) + throw std::runtime_error("Reader not ready for this source"); + + HRESULT hr = S_OK; + IMFSample *pSample = NULL; + DWORD streamIndex=0, flags=0; + LONGLONG llTimeStamp=0; + + if(this->asyncMode) + { + if(!this->readerCallback->GetPending()) + { + hr = this->reader->ReadSample( + MF_SOURCE_READER_ANY_STREAM, // Stream index. 
+ 0, NULL, NULL, NULL, NULL + ); + this->readerCallback->SetPending(); + } + + HRESULT hrStatus = S_OK; + DWORD dwStreamIndex = 0; + DWORD dwStreamFlags = 0; + LONGLONG llTimestamp = 0; + char *frame = NULL; + DWORD buffLen = 0; + + int found = this->readerCallback->GetFrame(&hrStatus, &dwStreamIndex, + &dwStreamFlags, &llTimestamp, &frame, &buffLen); + + //cout << (long) frame << "," << buffLen << endl; + if(found) + { + if((frame == NULL) != (buffLen == 0)) + throw runtime_error("Frame buffer corruption detected"); + + EnterCriticalSection(&lock); + + //Ensure the buffer does not overflow + while(this->frameBuff.size() >= this->maxBuffSize) + { + char *frameToDrop = this->frameBuff[0]; + delete [] frameToDrop; + frameToDrop = NULL; + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + this->PopFrontMetaDataBuff(); + } + + //Copy frame to output buffer + if(this->frameBuff.size() < this->maxBuffSize) + { + this->frameBuff.push_back(frame); + this->frameLenBuff.push_back(buffLen); + this->hrStatusBuff.push_back(hrStatus); + this->dwStreamIndexBuff.push_back(dwStreamIndex); + this->dwStreamFlagsBuff.push_back(dwStreamFlags); + this->llTimestampBuff.push_back(llTimestamp); + + this->SetSampleMetaData(dwStreamIndex); + } + else + { + delete [] frame; + } + + LeaveCriticalSection(&lock); + + //for(long i=VideoProcAmp_Brightness;i<=VideoProcAmp_Gain;i++) + // this->GetMfParameter(i, 0); + /*int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); + std::cout << "ret" << ret << std::endl; + this->GetMfControl(CameraControl_Exposure, 1); + this->SetMfParameter(VideoProcAmp_Gain, 0, VideoProcAmp_Flags_Auto); + std::cout << "ret" << ret << 
std::endl; + this->GetMfParameter(VideoProcAmp_Gain, 0); + this->SetMfParameter(VideoProcAmp_Gamma, 72, VideoProcAmp_Flags_Auto); + std::cout << "ret" << ret << std::endl; + this->GetMfParameter(VideoProcAmp_Gamma, 0);*/ + //for(long i=CameraControl_Pan;i<=CameraControl_Focus;i++) + // this->GetMfControl(i, 0); + + return; + } + else + return; + } + else + { + hr = this->reader->ReadSample( + MF_SOURCE_READER_ANY_STREAM, // Stream index. + 0, // Flags. + &streamIndex, // Receives the actual stream index. + &flags, // Receives status flags. + &llTimeStamp, // Receives the time stamp. + &pSample // Receives the sample or NULL. + ); + + if (FAILED(hr)) + { + return; + } + + if(pSample!=NULL) + { + char *frame = NULL; + DWORD buffLen = SampleToStaticObj(pSample, &frame); + + EnterCriticalSection(&lock); + + //Ensure the buffer does not overflow + while(this->frameBuff.size() >= this->maxBuffSize) + { + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + this->PopFrontMetaDataBuff(); + } + + //Copy frame to output buffer + if(this->frameBuff.size() < this->maxBuffSize) + { + this->frameBuff.push_back(frame); + this->frameLenBuff.push_back(buffLen); + this->hrStatusBuff.push_back(hr); + this->dwStreamIndexBuff.push_back(streamIndex); + this->dwStreamFlagsBuff.push_back(flags); + this->llTimestampBuff.push_back(llTimeStamp); + + this->SetSampleMetaData(streamIndex); + } + else + { + delete [] frame; + } + + LeaveCriticalSection(&lock); + + pSample->Release(); + return; + } + + if(pSample) pSample->Release(); + } + +} + +void MfVideoIn::StopDeviceInternal() +{ + if(this->reader == NULL) + throw runtime_error("Device is not running"); + + //Shut down reader + 
SafeRelease(&this->reader); + + //Reader callback seems to automatically delete + this->readerCallback = NULL; + +} + +void MfVideoIn::CloseDeviceInternal() +{ + if(this->source == NULL) + throw runtime_error("Device is not open"); + + //Shut down source + SafeRelease(&this->source); +} + +//*************************************************************** + +void *MfVideoIn_Worker_thread(void *arg) +{ + class MfVideoIn *argobj = (class MfVideoIn*) arg; + argobj->Run(); + + return NULL; +} + +//****************************************************************** + +int EnumDevices(IMFActivate ***ppDevicesOut) +{ + //Warning: the result from this function must be manually freed! + + //Allocate memory to store devices + IMFAttributes *pAttributes = NULL; + *ppDevicesOut = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 1); + if(!SUCCEEDED(hr)) + throw std::runtime_error("MFCreateAttributes failed"); + + hr = pAttributes->SetGUID( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("SetGUID failed"); + } + + //Get list of devices from media foundation + UINT32 count; + hr = MFEnumDeviceSources(pAttributes, ppDevicesOut, &count); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("MFEnumDeviceSources failed"); + } + + SafeRelease(&pAttributes); + return count; +} + +class WmfListDevices : public WmfBase +{ +public: + WmfListDevices() : WmfBase() + { + + } + + virtual ~WmfListDevices() + { + + } + + std::vector > ListDevices() + { + std::vector > out; + + IMFActivate **ppDevices = NULL; + int count = EnumDevices(&ppDevices); + + //For each device + for(int i=0; iGetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, + &vd_pFriendlyName, + NULL + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + CoTaskMemFree(vd_pFriendlyName); + throw std::runtime_error("GetAllocatedString failed"); + } + + wchar_t 
*symbolicLink = NULL; + hr = pActivate->GetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, + &symbolicLink, + NULL + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + CoTaskMemFree(vd_pFriendlyName); + CoTaskMemFree(symbolicLink); + throw std::runtime_error("GetAllocatedString failed"); + } + + std::vector src; + src.push_back(symbolicLink); + src.push_back(vd_pFriendlyName); + out.push_back(src); + + CoTaskMemFree(vd_pFriendlyName); + CoTaskMemFree(symbolicLink); + } + + if(ppDevices) + SafeRelease(ppDevices); + + return out; + } +}; + +std::vector > List_in_devices() +{ + class WmfListDevices wmfListDevices; + std::vector > out = wmfListDevices.ListDevices(); + + return out; +} diff --git a/mfvideoin.h b/mfvideoin.h new file mode 100644 index 0000000..f76ad95 --- /dev/null +++ b/mfvideoin.h @@ -0,0 +1,81 @@ + +#ifndef MFVIDEOIN_H +#define MFVIDEOIN_H + +#include +#include +#include +#include +#include "base.h" + +class WmfBase : public Base_Video_In +{ +public: + WmfBase(); + virtual ~WmfBase(); + +}; + +class MfVideoIn : public WmfBase +{ +public: + MfVideoIn(const wchar_t *devName); + virtual ~MfVideoIn(); + + virtual void Stop(); + virtual void WaitForStop(); + virtual void OpenDevice(); + virtual void SetFormat(const char *fmt, int width, int height); + virtual void StartDevice(int buffer_count); + virtual void StopDevice(); + virtual void CloseDevice(); + virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + + virtual int GetMfControl(long prop, int range = 0); + virtual int SetMfControl(long prop, long value = 0, long flags = 0); + virtual int GetMfParameter(long param, int range = 0); + virtual int SetMfParameter(long param, long value = 0, long flags = 0); + + void Run(); +protected: + + IMFSourceReader* reader; + IMFMediaSource* source; + int asyncMode; + std::wstring devName; + class SourceReaderCB* readerCallback; + int stopping; + int stopped; + int openDevFlag; + int startDevFlag; + int 
stopDevFlag; + int closeDevFlag; + CRITICAL_SECTION lock; + unsigned maxBuffSize; + + std::vector frameBuff; + std::vector frameLenBuff; + std::vector hrStatusBuff; + std::vector dwStreamIndexBuff; + std::vector dwStreamFlagsBuff; + std::vector llTimestampBuff; + + std::vector plStrideBuff; + std::vector majorTypeBuff, subTypeBuff; + std::vector widthBuff; + std::vector heightBuff; + std::vector isCompressedBuff; + + void OpenDeviceInternal(); + void StartDeviceInternal(); + void SetSampleMetaData(DWORD streamIndex); + void PopFrontMetaDataBuff(); + void ReadFramesInternal(); + void StopDeviceInternal(); + void CloseDeviceInternal(); +}; + +void *MfVideoIn_Worker_thread(void *arg); +std::vector > List_in_devices(); + +#endif //MFVIDEOIN_H diff --git a/mfvideoout.cpp b/mfvideoout.cpp new file mode 100644 index 0000000..b79c73a --- /dev/null +++ b/mfvideoout.cpp @@ -0,0 +1,75 @@ + +#include "mfvideoout.h" +#include +#include + +//http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx + +MfVideoOut::MfVideoOut(const char *devName) : Base_Video_Out() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); + + + + + +} + +MfVideoOut::~MfVideoOut() +{ + MFShutdown(); + + CoUninitialize(); +} + +void MfVideoOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +{ + +} + +void MfVideoOut::Stop() +{ + +} + +int MfVideoOut::WaitForStop() +{ + return 1; +} + +void MfVideoOut::SetOutputSize(int width, int height) +{ + +} + +void MfVideoOut::SetOutputPxFmt(const char *fmt) +{ + +} + +void MfVideoOut::Run() +{ + +} + +//******************************************************************************* + +void *MfVideoOut_Worker_thread(void *arg) +{ + class MfVideoOut *argobj = (class MfVideoOut*) arg; + 
argobj->Run(); + + return NULL; +} + +std::vector List_out_devices() +{ + std::vector out; + return out; +} diff --git a/mfvideoout.h b/mfvideoout.h new file mode 100644 index 0000000..f406a65 --- /dev/null +++ b/mfvideoout.h @@ -0,0 +1,31 @@ + +#ifndef MFVIDEOOUT_H +#define MFVIDEOOUT_H + +#include +#include +#include "base.h" + +class MfVideoOut : public Base_Video_Out +{ +public: + MfVideoOut(const char *devName); + virtual ~MfVideoOut(); + + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + + void Run(); + +}; + +void *MfVideoOut_Worker_thread(void *arg); + +std::vector List_out_devices(); + +#endif //MFVIDEOOUT_H + diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp new file mode 100644 index 0000000..3d0c9d8 --- /dev/null +++ b/mfvideooutfile.cpp @@ -0,0 +1,561 @@ + +#include "mfvideooutfile.h" +#include "pixfmt.h" +#include +#include +#include +#include +#include +using namespace std; + +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + +std::string HrCodeToStdString(HRESULT hr) +{ + std::string out; + _com_error err(hr); + LPCTSTR hrErrMsg = err.ErrorMessage(); + +#ifdef UNICODE + size_t errsize = wcstombs(NULL, hrErrMsg, 0); + char* tmpStr = new char[errsize + 1]; + wcstombs(tmpStr, hrErrMsg, errsize + 1 ); + out = hrErrMsg; + delete tmpStr; +#else + out = hrErrMsg; +#endif + return out; +} + +FILETIME GetTimeNow() +{ + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME time; + SystemTimeToFileTime(&systime, &time); + return time; +} + +double SubtractTimes(FILETIME first, FILETIME second) +{ + LONGLONG diffInTicks = + reinterpret_cast(&first)->QuadPart - + reinterpret_cast(&second)->QuadPart; + double diffInSec = diffInTicks / (double)1e7; + return diffInSec; +} + +void SetTimeToZero(FILETIME &t) +{ + t.dwLowDateTime 
= 0; + t.dwHighDateTime = 0; +} + +bool TimeIsZero(FILETIME &t) +{ + if (t.dwLowDateTime != 0) return 0; + return t.dwHighDateTime == 0; +} + +std::wstring CStringToWString(const char *inStr) +{ + wchar_t *tmpDevName = new wchar_t[strlen(inStr)+1]; + size_t returnValue; + + mbstowcs_s(&returnValue, tmpDevName, strlen(inStr)+1, inStr, strlen(inStr)+1); + std::wstring tmpDevName2(tmpDevName); + delete [] tmpDevName; + return tmpDevName2; +} + +MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); + + this->pSinkWriter = NULL; + this->streamIndex = 0; + this->rtStart = 0; + this->pxFmt = "YV12"; //"BGR24"; + this->videoCodec = "H264"; //"WMV3"; + + this->outputWidth = 640; + this->outputHeight = 480; + this->bitRate = 800000; + this->fina = CStringToWString(fiName); + this->frameRateFps = 25; + this->prevFrameDuration = 0; + this->pIByteStream = NULL; + SetTimeToZero(this->startVideoTime); +} + +MfVideoOutFile::~MfVideoOutFile() +{ + MFShutdown(); + + CoUninitialize(); +} + +void MfVideoOutFile::OpenFile() +{ + + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Video output file already open"); + } + this->rtStart = 0; + IMFMediaType *pMediaTypeOut = NULL; + IMFMediaType *pMediaTypeIn = NULL; + this->rtDuration = 1; + std::string errMsg; + if(this->frameRateFps > 0) + MFFrameRateToAverageTimePerFrame(this->frameRateFps, 1, &this->rtDuration); + + IMFAttributes *containerAttributes = NULL; + HRESULT hr = MFCreateAttributes(&containerAttributes, 0); + + this->pIByteStream = NULL; + + if (SUCCEEDED(hr)) + { + hr = MFCreateFile(MF_ACCESSMODE_READWRITE, + MF_OPENMODE_DELETE_IF_EXIST, + MF_FILEFLAGS_NONE, + this->fina.c_str(), + &pIByteStream); + if (!SUCCEEDED(hr)) + { + errMsg = 
"MFCreateFile failed"; + } + } + + if(containerAttributes!=NULL) + { + int len4 = this->fina.size() - 4; + if(len4 < 0) len4 = 0; + const wchar_t *ext4 = &this->fina.c_str()[len4]; + if(wcscmp(ext4, L".mp4")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_MPEG4); + if(wcscmp(ext4, L".asf")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF); + if(wcscmp(ext4, L".wmv")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF); + if(wcscmp(ext4, L".mp3")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_MP3); +#ifdef MFTranscodeContainerType_AVI + if(wcscmp(ext4, L".avi")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_AVI); +#endif + } + + if (SUCCEEDED(hr)) + { + hr = MFCreateSinkWriterFromURL(this->fina.c_str(), pIByteStream, containerAttributes, &pSinkWriter); + if (!SUCCEEDED(hr)) errMsg = "MFCreateSinkWriterFromURL failed"; + } + + // Set the output media type. 
+ if (SUCCEEDED(hr)) + { + hr = MFCreateMediaType(&pMediaTypeOut); + if (!SUCCEEDED(hr)) errMsg = "MFCreateMediaType failed"; + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (!SUCCEEDED(hr)) errMsg = "SetGUID MF_MT_MAJOR_TYPE failed"; + } + if (SUCCEEDED(hr)) + { + if(strcmp(this->videoCodec.c_str(), "WMV3")==0) + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); + if(strcmp(this->videoCodec.c_str(), "H264")==0) + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); + if (!SUCCEEDED(hr)) errMsg = "SetGUID MF_MT_SUBTYPE failed"; + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, this->bitRate); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_AVG_BITRATE failed"; + } + + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_INTERLACE_MODE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->frameRateFps, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_PIXEL_ASPECT_RATIO failed"; + } + + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex); + if (!SUCCEEDED(hr)) errMsg = "AddStream failed"; + } + + // Get supported types of output + /*IMFTransform *transform = NULL; + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->GetServiceForStream(streamIndex, GUID_NULL, IID_IMFTransform, (LPVOID*)&transform); + if (!SUCCEEDED(hr)) + { + errMsg = "GetServiceForStream failed: "; + std::string hrErrStr = HrCodeToStdString(hr); + errMsg += hrErrStr; + } + } + + if 
(SUCCEEDED(hr) && transform != NULL) + { + IMFMediaType *fmtType; + hr = transform->GetInputAvailableType(streamIndex, 0, &fmtType); + std::cout << SUCCEEDED(hr) << "," << (LONG)fmtType << std::endl; + if (!SUCCEEDED(hr)) errMsg = "GetInputAvailableType failed"; + }*/ + + // Set the input media type. + if (SUCCEEDED(hr)) + { + hr = MFCreateMediaType(&pMediaTypeIn); + if (!SUCCEEDED(hr)) errMsg = "Set MFCreateMediaType failed"; + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_MAJOR_TYPE failed"; + } + + if (SUCCEEDED(hr)) + { + if(strcmp(this->pxFmt.c_str(), "BGR24")==0) + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + if(strcmp(this->pxFmt.c_str(), "I420")==0) + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420); + if(strcmp(this->pxFmt.c_str(), "YV12")==0) //Supported by H264 + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_SUBTYPE failed"; + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_INTERLACE_MODE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->frameRateFps, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_PIXEL_ASPECT_RATIO failed"; + } + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL); + if (!SUCCEEDED(hr)) errMsg = "SetInputMediaType failed"; + if(hr == MF_E_INVALIDMEDIATYPE) errMsg.append(": MF_E_INVALIDMEDIATYPE"); + if(hr == 
MF_E_INVALIDSTREAMNUMBER) errMsg.append(": MF_E_INVALIDSTREAMNUMBER"); + if(hr == MF_E_TOPO_CODEC_NOT_FOUND) errMsg.append(": MF_E_TOPO_CODEC_NOT_FOUND"); + } + + // Tell the sink writer to start accepting data. + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->BeginWriting(); + if (!SUCCEEDED(hr)) errMsg = "BeginWriting failed"; + } + + SafeRelease(&pMediaTypeOut); + SafeRelease(&pMediaTypeIn); + + if(errMsg.size() > 0) + { + throw runtime_error(errMsg); + } + return; +} + +void MfVideoOutFile::CloseFile() +{ + this->CopyFromBufferToOutFile(1); + + if(this->pSinkWriter != NULL) + { + HRESULT hr = this->pSinkWriter->Finalize(); + } + + SafeRelease(&pSinkWriter); + + SafeRelease(&pIByteStream); +} + +void MfVideoOutFile::SendFrame(const char *imgIn, + unsigned imgLen, + const char *pxFmt, + int width, + int height, + unsigned long tv_sec, + unsigned long tv_usec) +{ + if(this->pSinkWriter == NULL) + this->OpenFile(); + + FILETIME timeNow = GetTimeNow(); + if(TimeIsZero(this->startVideoTime)) + { + this->startVideoTime = timeNow; + } + + if(tv_sec == 0 && tv_usec == 0) + { + //Using fixed frame rate and generate time stamps + tv_sec = (unsigned long)(this->rtStart / 1e7); + tv_usec = (unsigned long)((this->rtStart - tv_sec * 1e7)/10. 
+ 0.5); + this->rtStart += this->rtDuration; + } + + //Add frame to output buffer + class FrameMetaData tmp; + this->outBufferMeta.push_back(tmp); + class FrameMetaData &meta = this->outBufferMeta[this->outBufferMeta.size()-1]; + meta.fmt = pxFmt; + meta.width = width; + meta.height = height; + meta.buffLen = imgLen; + meta.tv_sec = tv_sec; + meta.tv_usec = tv_usec; + std::string img(imgIn, imgLen); + this->outBuffer.push_back(img); + + this->CopyFromBufferToOutFile(0); +} + +void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame) +{ + if(this->outBuffer.size() < 2 && !lastFrame) + return; + if(this->outBuffer.size() == 0) + return; + + std::string &frame = this->outBuffer[0]; + class FrameMetaData &meta = this->outBufferMeta[0]; + class FrameMetaData *metaNext = NULL; + if(this->outBuffer.size() >= 2) + metaNext = &this->outBufferMeta[1]; + + IMFSample *pSample = NULL; + IMFMediaBuffer *pBuffer = NULL; + DWORD cbBuffer = 0; + + if(strcmp(this->pxFmt.c_str(), "BGR24") == 0) + { + LONG cbWidth = 3 * this->outputWidth; + cbBuffer = cbWidth * this->outputHeight; + } + + if(strcmp(this->pxFmt.c_str(), "I420") == 0 || strcmp(this->pxFmt.c_str(), "YV12") == 0) + { + cbBuffer = 1.5 * this->outputHeight * this->outputWidth; + } + + if(cbBuffer==0) + throw std::runtime_error("Unsupported pixel format"); + + BYTE *pData = NULL; + + // Create a new memory buffer. + HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer); + + // Lock the buffer and copy the video frame to the buffer. 
+ if (SUCCEEDED(hr)) + { + hr = pBuffer->Lock(&pData, NULL, NULL); + } + if (SUCCEEDED(hr)) + { + if(strcmp(this->pxFmt.c_str(), meta.fmt.c_str())!=0) + { + //std::cout << (long) pData << std::endl; + unsigned int outBuffLen = cbBuffer; + DecodeAndResizeFrame((const unsigned char *)frame.c_str(), frame.size(), meta.fmt.c_str(), + meta.width, meta.height, + this->pxFmt.c_str(), + (unsigned char **)&pData, + &outBuffLen, + this->outputWidth, this->outputHeight); + + //std::cout << (long) pData << std::endl; + } + else + { + DWORD cpyLen = frame.size(); + if(cbBuffer < cpyLen) cpyLen = cbBuffer; + memcpy(pData, frame.c_str(), cpyLen); + } + } + if (pBuffer) + { + pBuffer->Unlock(); + } + + // Set the data length of the buffer. + if (SUCCEEDED(hr)) + { + hr = pBuffer->SetCurrentLength(cbBuffer); + } + + // Create a media sample and add the buffer to the sample. + if (SUCCEEDED(hr)) + { + hr = MFCreateSample(&pSample); + } + if (SUCCEEDED(hr)) + { + hr = pSample->AddBuffer(pBuffer); + } + + // Set the time stamp and the duration. + LONGLONG frameTime = (LONGLONG)meta.tv_sec * (LONGLONG)1e7 + (LONGLONG)meta.tv_usec * 10; + LONGLONG duration = 0; + if(metaNext!=NULL) + { + LONGLONG frameTimeNext = (LONGLONG)metaNext->tv_sec * (LONGLONG)1e7 + (LONGLONG)metaNext->tv_usec * 10; + duration = frameTimeNext - frameTime; + } + else + { + duration = this->prevFrameDuration; + if(duration == 0) duration = (LONGLONG)1e7; //Avoid zero duration frames + } + + if (SUCCEEDED(hr)) + { + hr = pSample->SetSampleTime(frameTime); + } + if (SUCCEEDED(hr)) + { + hr = pSample->SetSampleDuration(duration); + } + + // Send the sample to the Sink Writer. 
+ if (SUCCEEDED(hr) && this->pSinkWriter != NULL) + { + hr = this->pSinkWriter->WriteSample(streamIndex, pSample); + } + + SafeRelease(&pSample); + SafeRelease(&pBuffer); + + this->outBuffer.erase(this->outBuffer.begin()); + this->outBufferMeta.erase(this->outBufferMeta.begin()); + this->prevFrameDuration = duration; +} + +void MfVideoOutFile::Stop() +{ + this->CloseFile(); + +} + +int MfVideoOutFile::WaitForStop() +{ + return 1; +} + +void MfVideoOutFile::SetOutputSize(int width, int height) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video size before opening video file"); + } + this->outputWidth = width; + this->outputHeight = height; +} + +void MfVideoOutFile::SetOutputPxFmt(const char *fmt) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video format before opening video file"); + } + if(strcmp(fmt,"BGR24")!=0 && strcmp(fmt,"I420")!=0 && strcmp(fmt,"YV12")!=0) + { + throw std::runtime_error("Only BGR24, YV12 and I420 is supported"); + } + this->pxFmt = fmt; +} + +void MfVideoOutFile::SetFrameRate(unsigned int frameRateIn) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video parameters before opening video file"); + } + this->frameRateFps = frameRateIn; +} + +void MfVideoOutFile::SetVideoCodec(const char *codec, unsigned int bitrateIn) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video parameters before opening video file"); + } + if(codec!=NULL) + { + this->videoCodec = codec; + + + + } + if(bitrateIn > 0) + this->bitRate = bitrateIn; +} + +void MfVideoOutFile::Run() +{ + + +} + +//******************************************************************************* + +void *MfVideoOut_File_Worker_thread(void *arg) +{ + class MfVideoOutFile *argobj = (class MfVideoOutFile*) arg; + argobj->Run(); + + return NULL; +} diff --git a/mfvideooutfile.h b/mfvideooutfile.h new file mode 100644 index 0000000..3acf68d --- /dev/null +++ b/mfvideooutfile.h @@ -0,0 +1,55 @@ + 
+#ifndef MFVIDEOOUT_H +#define MFVIDEOOUT_H + +#include +#include +#include +#include +#include "base.h" + +class MfVideoOutFile : public Base_Video_Out +{ +public: + MfVideoOutFile(const char *devName); + virtual ~MfVideoOutFile(); + + void OpenFile(); + void CloseFile(); + + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + virtual void SetFrameRate(unsigned int frameRateIn); + virtual void SetVideoCodec(const char *codec, unsigned int bitrate); + + void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame = 0); + void Run(); + +protected: + IMFSinkWriter *pSinkWriter; + DWORD streamIndex; + LONGLONG rtStart; + UINT64 rtDuration; + std::string pxFmt; + std::string videoCodec; + std::wstring fina; + + int outputWidth, outputHeight; + UINT32 bitRate, frameRateFps; + FILETIME startVideoTime; + std::vector outBufferMeta; + std::vector outBuffer; + LONGLONG prevFrameDuration; + IMFByteStream *pIByteStream; +}; + +void *MfVideoOut_File_Worker_thread(void *arg); + +#endif //MFVIDEOOUT_H + diff --git a/namedpipeout.cpp b/namedpipeout.cpp new file mode 100644 index 0000000..f27177b --- /dev/null +++ b/namedpipeout.cpp @@ -0,0 +1,473 @@ + +#include "namedpipeout.h" +#include "pixfmt.h" + +#include +#include +using namespace std; + +#include +#include +#include +#define BUFSIZE 1024*1024*10 + +int ProcessClientMessage(class InstanceConfig &instanceConfig); +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, class NamedPipeOut *, int frameCount); + +class InstanceConfig +{ +public: + std::string rxBuff; + UINT32 width, height, frameLen; + + InstanceConfig() + { + width = 0; + height = 0; + frameLen = 0; + } +}; + +class ConnectionThreadInfo +{ +public: + HANDLE hPipe; + class NamedPipeOut 
*parent; + + ConnectionThreadInfo() + { + hPipe = INVALID_HANDLE_VALUE; + parent = NULL; + } +}; + +DWORD WINAPI InstanceThread(LPVOID lpvParam) +// This routine is a thread processing function to read from and reply to a client +// via the open pipe connection passed from the main loop. Note this allows +// the main loop to continue executing, potentially creating more threads of +// of this procedure to run concurrently, depending on the number of incoming +// client connections. +{ + HANDLE hHeap = GetProcessHeap(); + char* pRequest = (char*)HeapAlloc(hHeap, 0, BUFSIZE); + char* pReply = (char*)HeapAlloc(hHeap, 0, BUFSIZE); + + DWORD cbBytesRead = 0, cbReplyBytes = 0, cbWritten = 0; + BOOL fSuccess = FALSE; + HANDLE hPipe = NULL; + class InstanceConfig instanceConfig; + + // Do some extra error checking since the app will keep running even if this + // thread fails. + + if (lpvParam == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL value in lpvParam.\n"); + printf( " InstanceThread exitting.\n"); + if (pReply != NULL) HeapFree(hHeap, 0, pReply); + if (pRequest != NULL) HeapFree(hHeap, 0, pRequest); + return (DWORD)-1; + } + + if (pRequest == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL heap allocation.\n"); + printf( " InstanceThread exitting.\n"); + if (pReply != NULL) HeapFree(hHeap, 0, pReply); + return (DWORD)-1; + } + + if (pReply == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL heap allocation.\n"); + printf( " InstanceThread exitting.\n"); + if (pRequest != NULL) HeapFree(hHeap, 0, pRequest); + return (DWORD)-1; + } + + // Print verbose messages. In production code, this should be for debugging only. + printf("InstanceThread created, receiving and processing messages.\n"); + + // The thread's parameter is a handle to a pipe object instance. 
+ + class ConnectionThreadInfo *info = (class ConnectionThreadInfo *)lpvParam; + class NamedPipeOut *parent = info->parent; + hPipe = info->hPipe; + delete info; + + //Initialise timer + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME lastUpdateTime; + SystemTimeToFileTime(&systime, &lastUpdateTime); + int frameCount = 0; + +// Loop until done reading + while (1) + { + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME fiTime; + SystemTimeToFileTime(&systime, &fiTime); + LARGE_INTEGER fiTimeNum; + fiTimeNum.HighPart = fiTime.dwHighDateTime; + fiTimeNum.LowPart = fiTime.dwLowDateTime; + LARGE_INTEGER lastUpdate; + lastUpdate.HighPart = lastUpdateTime.dwHighDateTime; + lastUpdate.LowPart = lastUpdateTime.dwLowDateTime; + + LARGE_INTEGER elapse; + elapse.QuadPart = fiTimeNum.QuadPart - lastUpdate.QuadPart; + float elapseMs = elapse.LowPart / 10000.f; + + // Read client requests from the pipe. This simplistic code only allows messages + // up to BUFSIZE characters in length. + fSuccess = ReadFile( + hPipe, // handle to pipe + pRequest, // buffer to receive data + BUFSIZE, // size of buffer + &cbBytesRead, // number of bytes read + NULL); // not overlapped I/O + + if (!fSuccess || cbBytesRead == 0) + { + if (GetLastError() == ERROR_BROKEN_PIPE) + { + _tprintf(TEXT("InstanceThread: client disconnected.\n"), GetLastError()); + } + else + { + _tprintf(TEXT("InstanceThread ReadFile failed, GLE=%d.\n"), GetLastError()); + } + break; + } + + //Process received message + instanceConfig.rxBuff.append(pRequest, cbBytesRead); + + if(elapseMs >= 10.f) + { + ProcessClientMessage(instanceConfig); + + printf("elapse %f\n", elapseMs); + // Get response string + GetAnswerToRequest(pReply, &cbReplyBytes, instanceConfig, parent, frameCount); + frameCount++; + + // Write the reply to the pipe. 
+ fSuccess = WriteFile( + hPipe, // handle to pipe + pReply, // buffer to write from + cbReplyBytes, // number of bytes to write + &cbWritten, // number of bytes written + NULL); // not overlapped I/O + + if (!fSuccess || cbReplyBytes != cbWritten) + { + _tprintf(TEXT("InstanceThread WriteFile failed, GLE=%d.\n"), GetLastError()); + break; + } + lastUpdateTime=fiTime; + } + else + { + Sleep(1); + } + } + +// Flush the pipe to allow the client to read the pipe's contents +// before disconnecting. Then disconnect the pipe, and close the +// handle to this pipe instance. + + FlushFileBuffers(hPipe); + DisconnectNamedPipe(hPipe); + CloseHandle(hPipe); + + HeapFree(hHeap, 0, pRequest); + HeapFree(hHeap, 0, pReply); + + printf("InstanceThread exitting.\n"); + return 1; +} + +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, + class NamedPipeOut *parent, int frameCount) +{ + if(instanceConfig.frameLen + 8 < BUFSIZE) + { + //Return frame + UINT32 *numArr = (UINT32 *)pReply; + numArr[0] = 2; + numArr[1] = instanceConfig.frameLen; + unsigned char *imgPix = (unsigned char *)&pReply[8]; + + parent->Lock(); + + //Copy and resize frame if necessary (and invert y) + //TODO use bilinear sampling? 
+ ResizeRgb24ImageNN(parent->currentFrame, parent->currentFrameLen, + parent->currentFrameWidth, + parent->currentFrameHeight, + imgPix, + instanceConfig.frameLen, + instanceConfig.width, instanceConfig.height, 1); + + //memcpy(imgPix, parent->currentFrame, bytesToCopy); + parent->UnLock(); + + *pchBytes = 8 + instanceConfig.frameLen; + } + else + { + //Insufficient space in buffer + UINT32 *numArr = (UINT32 *)pReply; + numArr[0] = 3; + numArr[1] = 0; + *pchBytes = 8; + } +} + +int ProcessClientMessage(class InstanceConfig &instanceConfig) +{ + int count = 0; + int processing = 1; + while(processing && instanceConfig.rxBuff.size() > 8) + { + UINT32 *wordArray = (UINT32 *)instanceConfig.rxBuff.c_str(); + UINT32 msgType = wordArray[0]; + UINT32 msgLen = wordArray[1]; + if(instanceConfig.rxBuff.size() >= 8+msgLen) + { + std::string msg(instanceConfig.rxBuff, 8, msgLen); + UINT32 *msgArray = (UINT32 *)msg.c_str(); + //printf("%d %d %d\n", rxBuff.size(), msgType, msg.size()); + + instanceConfig.rxBuff.assign(instanceConfig.rxBuff, 8+msgLen, instanceConfig.rxBuff.size() - 8 - msgLen); + + if(msgType == 1) + { + instanceConfig.width = msgArray[0]; + instanceConfig.height = msgArray[1]; + instanceConfig.frameLen = msgArray[2]; + count ++; + } + + if(msgType != 1) + { + printf("Buffer corruption detected\n"); + return 0; + } + } + else + { + processing = 0; + } + } + + printf("rx msg count %d\n", count); + printf("w%d h%d buff%d\n",instanceConfig.width, instanceConfig.height, instanceConfig.frameLen); + + return 1; +} + + +//******************************************************************************************* + +NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() +{ + HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); + + running = 0; + currentFrameAlloc = 0; + currentFrameLen = 0; + currentFrame = NULL; + currentFrameWidth = 0; + currentFrameHeight = 0; + 
InitializeCriticalSection(&lock); +} + +NamedPipeOut::~NamedPipeOut() +{ + CoUninitialize(); +} + +void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec, + unsigned long tv_usec) +{ + cout << "NamedPipeOut::SendFrame" << endl; + + //Convert from input pxFmt to BGR24. + unsigned char *bgrBuff = NULL; + unsigned bgrBuffLen = 0; + int ret = DecodeFrame((unsigned char*)imgIn, imgLen, + pxFmt, + width, height, + "BGR24", + &bgrBuff, + &bgrBuffLen); + + if(ret>0 && bgrBuff != NULL) + { + this->Lock(); + if(bgrBuffLen > this->currentFrameAlloc || this->currentFrame == NULL) + { + //Resize current frame buffer + if(this->currentFrame != NULL) delete [] this->currentFrame; + this->currentFrame = new unsigned char [bgrBuffLen]; + this->currentFrameAlloc = bgrBuffLen; + } + + //Copy new frame to local storage + memcpy(this->currentFrame, bgrBuff, bgrBuffLen); + this->currentFrameWidth = width; + this->currentFrameHeight = height; + this->currentFrameLen = bgrBuffLen; + + //Free temporary buffer + delete [] bgrBuff; + bgrBuff = NULL; + bgrBuffLen = 0; + + this->UnLock(); + } + else + { + throw std::runtime_error("Cannot convert pixel format to BGR24"); + } +} + +void NamedPipeOut::Stop() +{ + EnterCriticalSection(&lock); + this->running = 0; + LeaveCriticalSection(&lock); +} + +int NamedPipeOut::WaitForStop() +{ + return 1; +} + +void NamedPipeOut::SetOutputSize(int width, int height) +{ + +} + +void NamedPipeOut::SetOutputPxFmt(const char *fmt) +{ + +} + +void NamedPipeOut::Run() +{ + EnterCriticalSection(&lock); + this->running = 1; + int tmpRunning = this->running; + LeaveCriticalSection(&lock); + + BOOL fConnected = FALSE; + DWORD dwThreadId = 0; + HANDLE hPipe = INVALID_HANDLE_VALUE, hThread = NULL; + LPTSTR lpszPipename = TEXT("\\\\.\\pipe\\testpipe"); + +// Creates an instance of the named pipe and +// then waits for a client to connect to it. 
When the client +// connects, a thread is created to handle communications +// with that client, and this loop is free to wait for the +// next client connect request. It is an infinite loop. + + while (tmpRunning) + { + _tprintf( TEXT("\nPipe Server: Main thread awaiting client connection on %s\n"), lpszPipename); + hPipe = CreateNamedPipe( + lpszPipename, // pipe name + PIPE_ACCESS_DUPLEX, // read/write access + PIPE_TYPE_BYTE | // message type pipe + PIPE_READMODE_BYTE | // message-read mode + PIPE_WAIT, // blocking mode + PIPE_UNLIMITED_INSTANCES, // max. instances + BUFSIZE, // output buffer size + BUFSIZE, // input buffer size + 0, // client time-out + NULL); // default security attribute + + if (hPipe == INVALID_HANDLE_VALUE) + { + _tprintf(TEXT("CreateNamedPipe failed, GLE=%d.\n"), GetLastError()); + return; + } + + // Wait for the client to connect; if it succeeds, + // the function returns a nonzero value. If the function + // returns zero, GetLastError returns ERROR_PIPE_CONNECTED. + + fConnected = ConnectNamedPipe(hPipe, NULL) ? + TRUE : (GetLastError() == ERROR_PIPE_CONNECTED); + + if (fConnected) + { + printf("Client connected, creating a processing thread.\n"); + + class ConnectionThreadInfo *info = new class ConnectionThreadInfo(); + info->parent = this; + info->hPipe = hPipe; + + // Create a thread for this client. + hThread = CreateThread( + NULL, // no security attribute + 0, // default stack size + InstanceThread, // thread proc + (LPVOID) info, // thread parameter + 0, // not suspended + &dwThreadId); // returns thread ID + + if (hThread == NULL) + { + _tprintf(TEXT("CreateThread failed, GLE=%d.\n"), GetLastError()); + return; + } + else CloseHandle(hThread); + } + else + // The client could not connect, so close the pipe. 
+ CloseHandle(hPipe); + + EnterCriticalSection(&lock); + tmpRunning = this->running; + LeaveCriticalSection(&lock); + } +} + +void NamedPipeOut::Lock() +{ + EnterCriticalSection(&lock); +} + +void NamedPipeOut::UnLock() +{ + LeaveCriticalSection(&lock); +} + + +//******************************************************************************* + +void *NamedPipeOut_Worker_thread(void *arg) +{ + class NamedPipeOut *argobj = (class NamedPipeOut*) arg; + argobj->Run(); + + return NULL; +} + +std::vector List_out_devices() +{ + std::vector out; + out.push_back("VirtualCamera"); + return out; +} diff --git a/namedpipeout.h b/namedpipeout.h new file mode 100644 index 0000000..a872a5c --- /dev/null +++ b/namedpipeout.h @@ -0,0 +1,45 @@ + +#ifndef MFVIDEOOUT_H +#define MFVIDEOOUT_H + +#include +#include +#include +#include "base.h" + +class NamedPipeOut : public Base_Video_Out +{ +public: + NamedPipeOut(const char *devName); + virtual ~NamedPipeOut(); + + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + + void Run(); + + unsigned char *currentFrame; + unsigned currentFrameAlloc; + unsigned currentFrameLen; + unsigned currentFrameWidth, currentFrameHeight; + + void Lock(); + void UnLock(); + +protected: + int running; + CRITICAL_SECTION lock; +}; + +void *NamedPipeOut_Worker_thread(void *arg); + +std::vector List_out_devices(); + +#endif //MFVIDEOOUT_H + diff --git a/pixfmt.cpp b/pixfmt.cpp new file mode 100644 index 0000000..fa24d3f --- /dev/null +++ b/pixfmt.cpp @@ -0,0 +1,1100 @@ + +#include +#include +#include +#include +#include +#include +#include +#include "pixfmt.h" + +// ********************************************************************* + +#define HUFFMAN_SEGMENT_LEN 420 + +const char huffmanSegment[HUFFMAN_SEGMENT_LEN+1] = + 
"\xFF\xC4\x01\xA2\x00\x00\x01\x05\x01\x01\x01\x01" + "\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x02" + "\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x01\x00\x03" + "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00" + "\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09" + "\x0A\x0B\x10\x00\x02\x01\x03\x03\x02\x04\x03\x05" + "\x05\x04\x04\x00\x00\x01\x7D\x01\x02\x03\x00\x04" + "\x11\x05\x12\x21\x31\x41\x06\x13\x51\x61\x07\x22" + "\x71\x14\x32\x81\x91\xA1\x08\x23\x42\xB1\xC1\x15" + "\x52\xD1\xF0\x24\x33\x62\x72\x82\x09\x0A\x16\x17" + "\x18\x19\x1A\x25\x26\x27\x28\x29\x2A\x34\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94\x95" + "\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7\xA8" + "\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xC2" + "\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4\xD5" + "\xD6\xD7\xD8\xD9\xDA\xE1\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF1\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9" + "\xFA\x11\x00\x02\x01\x02\x04\x04\x03\x04\x07\x05" + "\x04\x04\x00\x01\x02\x77\x00\x01\x02\x03\x11\x04" + "\x05\x21\x31\x06\x12\x41\x51\x07\x61\x71\x13\x22" + "\x32\x81\x08\x14\x42\x91\xA1\xB1\xC1\x09\x23\x33" + "\x52\xF0\x15\x62\x72\xD1\x0A\x16\x24\x34\xE1\x25" + "\xF1\x17\x18\x19\x1A\x26\x27\x28\x29\x2A\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94" + "\x95\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7" + "\xA8\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA" + "\xC2\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4" + "\xD5\xD6\xD7\xD8\xD9\xDA\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9\xFA"; + +int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned char **twoBytesOut, unsigned *frameStartPosOut, unsigned 
*cursorOut) +{ + //Based on http://www.gdcl.co.uk/2013/05/02/Motion-JPEG.html + //and https://en.wikipedia.org/wiki/JPEG + + if(data == NULL) + throw std::runtime_error("Input data is null pointer"); + + *twoBytesOut = NULL; + *frameStartPosOut = 0; + *cursorOut = 0; + unsigned cursor = offset; + //Check frame start + unsigned frameStartPos = offset; + const unsigned char *twoBytes = &data[cursor]; + + if (twoBytes[0] != 0xff) + { + //print "Error: found header", map(hex,twoBytes),"at position",cursor + return 0; + } + + cursor = 2 + cursor; + + //Handle padding + int paddingByte = (twoBytes[0] == 0xff && twoBytes[1] == 0xff); + if(paddingByte) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Structure markers with 2 byte length + int markHeader = (twoBytes[0] == 0xff && twoBytes[1] >= 0xd0 && twoBytes[1] <= 0xd9); + if (markHeader) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Determine length of compressed (entropy) data + int compressedDataStart = (twoBytes[0] == 0xff && twoBytes[1] == 0xda); + if (compressedDataStart) + { + unsigned sosLength = ((data[cursor] << 8) + data[cursor+1]); + cursor += sosLength; + + //Seek through frame + int run = 1; + while(run) + { + unsigned char byte = data[cursor]; + cursor += 1; + + if(byte == 0xff) + { + unsigned char byte2 = data[cursor]; + cursor += 1; + if(byte2 != 0x00) + { + if(byte2 >= 0xd0 && byte2 <= 0xd8) + { + //Found restart structure + //print hex(byte), hex(byte2) + } + else + { + //End of frame + run = 0; + cursor -= 2; + } + } + else + { + //Add escaped 0xff value in entropy data + } + } + else + { + + } + } + + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //More cursor for all other segment types + unsigned segLength = (data[cursor] << 8) + data[cursor+1]; + cursor += segLength; + *twoBytesOut = twoBytes; + 
*frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; +} + +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer) +{ + if(inBufferPtr == NULL) + throw std::runtime_error("Input data is null pointer"); + + int parsing = 1; + unsigned frameStartPos = 0; + int huffFound = 0; + int huffAdded = 0; + + outBuffer.clear(); + + while(parsing) + { + //Check if we should stop + if (frameStartPos >= inBufferLen) + { + parsing = 0; + continue; + } + + //Read the next segment + const unsigned char *twoBytes = NULL; + unsigned frameEndPos=0; + + int ok = ReadJpegFrame(inBufferPtr, frameStartPos, &twoBytes, &frameStartPos, &frameEndPos); + + //if(verbose) + // print map(hex, twoBytes), frameStartPos, frameEndPos; + + //Stop if there is a serious error + if(!ok) + { + return -1; + } + + //Check if this segment is the compressed data + if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) + { + outBuffer.append(huffmanSegment, HUFFMAN_SEGMENT_LEN); + huffAdded = 1; + } + + //Check the type of frame + if(twoBytes[0] == 0xff && twoBytes[1] == 0xc4) + huffFound = 1; + + //Write current structure to output + outBuffer.append((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); + + //Move cursor + frameStartPos = frameEndPos; + } + if(huffAdded) return 1; + return 0; +} + +// ********************************************************************* + +struct my_error_mgr +{ + struct jpeg_error_mgr pub; /* "public" fields */ + + jmp_buf setjmp_buffer; /* for return to caller */ +}; + +typedef struct my_error_mgr * my_error_ptr; +METHODDEF(void) my_error_exit (j_common_ptr cinfo) +{ + my_error_ptr myerr = (my_error_ptr) cinfo->err; + + /* Always display the message. 
*/ + (*cinfo->err->output_message) (cinfo); + + /* Return control to the setjmp point */ + longjmp(myerr->setjmp_buffer, 1); +} + +int ReadJpegFile(unsigned char * inbuffer, + unsigned long insize, + unsigned char **outBuffer, + unsigned *outBufferSize, + int *widthOut, int *heightOut, int *channelsOut) +{ + /* This struct contains the JPEG decompression parameters and pointers to + * working space (which is allocated as needed by the JPEG library). + */ + + if(inbuffer == NULL) + throw std::runtime_error("Input data is null pointer"); + + if(inbuffer[0] != 0xFF || inbuffer[1] != 0xD8) + return 0; + + struct jpeg_decompress_struct cinfo; + memset(&cinfo, 0x00, sizeof(struct jpeg_decompress_struct)); + struct my_error_mgr jerr; + memset(&jerr, 0x00, sizeof(struct my_error_mgr)); + *widthOut = 0; + *heightOut = 0; + *channelsOut = 0; + + /* More stuff */ + int row_stride; /* physical row width in output buffer */ + + /* Step 1: initialize the JPEG decompression object. */ + cinfo.err = jpeg_std_error(&jerr.pub); + jerr.pub.error_exit = my_error_exit; + /* Establish the setjmp return context for my_error_exit to use. */ + if (setjmp(jerr.setjmp_buffer)) { + /* If we get here, the JPEG code has signaled an error. + * We need to clean up the JPEG object, close the input file, and return. 
+ */ + jpeg_destroy_decompress(&cinfo); + return 0; + } + + jpeg_create_decompress(&cinfo); + + /* Step 2: specify data source */ + jpeg_mem_src(&cinfo, inbuffer, insize); + + /* Step 3: read file parameters with jpeg_read_header() */ + jpeg_read_header(&cinfo, TRUE); + + unsigned int outBuffLen = cinfo.image_width * cinfo.image_height * cinfo.num_components; + if(*outBufferSize != 0 && *outBufferSize != outBuffLen) + throw std::runtime_error("Output buffer has incorrect size"); + *outBufferSize = outBuffLen; + if(*outBuffer == NULL) + *outBuffer = new unsigned char[*outBufferSize]; + *widthOut = cinfo.image_width; + *heightOut = cinfo.image_height; + *channelsOut = cinfo.num_components; + + /* Step 4: set parameters for decompression */ + //Optional + + /* Step 5: Start decompressor */ + jpeg_start_decompress(&cinfo); + /* JSAMPLEs per row in output buffer */ + row_stride = cinfo.output_width * cinfo.output_components; + + /* Step 6: while (scan lines remain to be read) */ + /* jpeg_read_scanlines(...); */ + + /* Here we use the library's state variable cinfo.output_scanline as the + * loop counter, so that we don't have to keep track ourselves. + */ + while (cinfo.output_scanline < cinfo.output_height) { + /* jpeg_read_scanlines expects an array of pointers to scanlines. + * Here the array is only one element long, but you could ask for + * more than one scanline at a time if that's more convenient. + */ + unsigned char *buffer_array[1]; + buffer_array[0] = *outBuffer + cinfo.output_scanline * row_stride; + jpeg_read_scanlines(&cinfo, buffer_array, 1); + + assert(row_stride = cinfo.image_width * cinfo.num_components); + } + + /* Step 7: Finish decompression */ + jpeg_finish_decompress(&cinfo); + + /* Step 8: Release JPEG decompression object */ + + /* This is an important step since it will release a good deal of memory. 
*/ + jpeg_destroy_decompress(&cinfo); + + /* At this point you may want to check to see whether any corrupt-data + * warnings occurred (test whether jerr.pub.num_warnings is nonzero). + */ + + return 1; +} + +// ************************************************************** + +int ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, + unsigned width, unsigned height, const char *targetPxFmt, + unsigned char **outIm, unsigned *outImSize) +{ + unsigned bytesperline = width * 2; + unsigned padding = 0; + if(*outImSize != 0 && *outImSize != sizeimage) + throw std::runtime_error("Output buffer has incorrect size"); + unsigned char *outBuff = *outIm; + if(*outIm == NULL) + { + outBuff = new unsigned char [*outImSize]; + *outIm = outBuff; + } + *outImSize = sizeimage+padding; + unsigned char *im2 = (unsigned char *)im; + + int uOffset = 0; + int vOffset = 0; + int yOffset1 = 0; + int yOffset2 = 0; + int formatKnown = 0; + + if(strcmp(targetPxFmt, "YUYV")==0) + { + uOffset = 1; + vOffset = 3; + yOffset1 = 0; + yOffset2 = 2; + formatKnown = 1; + } + + if(strcmp(targetPxFmt, "UYVY")==0) + { + uOffset = 0; + vOffset = 2; + yOffset1 = 1; + yOffset2 = 3; + formatKnown = 1; + } + + if(!formatKnown) + { + throw std::runtime_error("Unknown target pixel format"); + } + + for (unsigned y=0; y= dataLen) {throw std::runtime_error("1");} + if(rgbInOffset2+2 >= dataLen) {throw std::runtime_error("2");} + if(rgbInOffset3+2 >= dataLen) {throw std::runtime_error("3");} + if(rgbInOffset4+2 >= dataLen) {throw std::runtime_error("4");} + + unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2]; + unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset2+1] + 25 * im[rgbInOffset2+2]; + unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset3+1] + 25 * im[rgbInOffset3+2]; + unsigned Y4 = 66 * im[rgbInOffset4] + 129 * im[rgbInOffset4+1] + 25 * im[rgbInOffset4+2]; + + unsigned U1 = -38 * im[rgbInOffset1] - 74 * im[rgbInOffset1+1] + 112 * 
im[rgbInOffset1+2]; + unsigned U2 = -38 * im[rgbInOffset2] - 74 * im[rgbInOffset2+1] + 112 * im[rgbInOffset2+2]; + unsigned U3 = -38 * im[rgbInOffset3] - 74 * im[rgbInOffset3+1] + 112 * im[rgbInOffset3+2]; + unsigned U4 = -38 * im[rgbInOffset4] - 74 * im[rgbInOffset4+1] + 112 * im[rgbInOffset4+2]; + + unsigned V1 = 112 * im[rgbInOffset1] - 94 * im[rgbInOffset1+1] - 18 * im[rgbInOffset1+2]; + unsigned V2 = 112 * im[rgbInOffset2] - 94 * im[rgbInOffset2+1] - 18 * im[rgbInOffset2+2]; + unsigned V3 = 112 * im[rgbInOffset3] - 94 * im[rgbInOffset3+1] - 18 * im[rgbInOffset3+2]; + unsigned V4 = 112 * im[rgbInOffset4] - 94 * im[rgbInOffset4+1] - 18 * im[rgbInOffset4+2]; + + Y1 = ((Y1 + 128) >> 8) + 16; + Y2 = ((Y2 + 128) >> 8) + 16; + Y3 = ((Y3 + 128) >> 8) + 16; + Y4 = ((Y4 + 128) >> 8) + 16; + + U1 = ((U1 + 128) >> 8) + 128; + U2 = ((U2 + 128) >> 8) + 128; + U3 = ((U3 + 128) >> 8) + 128; + U4 = ((U4 + 128) >> 8) + 128; + + V1 = ((V1 + 128) >> 8) + 128; + V2 = ((V2 + 128) >> 8) + 128; + V3 = ((V3 + 128) >> 8) + 128; + V4 = ((V4 + 128) >> 8) + 128; + + if(YOutOffset1 >= *buffOutLen) {throw std::runtime_error("5");} + if(YOutOffset2 >= *buffOutLen) {throw std::runtime_error("6");} + if(YOutOffset3 >= *buffOutLen) {throw std::runtime_error("7");} + if(YOutOffset4 >= *buffOutLen) {throw std::runtime_error("8");} + if(VOutOffset >= *buffOutLen) {throw std::runtime_error("9");} + if(UOutOffset >= *buffOutLen) {throw std::runtime_error("10");} + + (*buffOut)[YOutOffset1] = Y1; + (*buffOut)[YOutOffset2] = Y2; + (*buffOut)[YOutOffset3] = Y3; + (*buffOut)[YOutOffset4] = Y4; + + (*buffOut)[VOutOffset] = (unsigned char)((V1+V2+V3+V4)/4.+0.5); + (*buffOut)[UOutOffset] = (unsigned char)((U1+U2+U3+U4)/4.+0.5); + } + } + + return 1; +} + +int ConvertYUYVtoRGB(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + // Convert buffer from YUYV to RGB. 
+ // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + unsigned int outBuffLen = dataLen * 6 / 4; + if(*buffOutLen != 0 && *buffOutLen != outBuffLen) + throw std::runtime_error("Output buffer has incorrect length"); + *buffOutLen = outBuffLen; + char *rgb = (char*)*buffOut; + if(*buffOut == NULL) + { + rgb = new char[*buffOutLen]; + *buffOut = (unsigned char*)rgb; + } + + char *rgb_max = rgb + *buffOutLen; + const unsigned char *yuyv = im; + +#define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } +#undef CLAMP + return 1; +} + +// ********************************************************************* + +int DecodeFrame(const unsigned char *data, unsigned dataLen, + const char *inPxFmt, + int &width, int &height, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + //Check if input format and output format match + if(strcmp(inPxFmt, targetPxFmt) == 0) + { + //Conversion not required, return a copy + if (*buffOutLen != 0 && *buffOutLen != dataLen) + { + throw std::runtime_error("Output buffer has incorrect size"); + } + if(*buffOut == NULL) + { + *buffOut = new unsigned char[dataLen]; + } + *buffOutLen = dataLen; + memcpy(*buffOut, data, dataLen); + return 1; + } + + //MJPEG frame to RGB24 + if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) + { + std::string jpegBin; + int huffRet = InsertHuffmanTableCTypes(data, dataLen, jpegBin); + + unsigned char *decodedBuff = NULL; + unsigned decodedBuffSize = 0; + int widthActual = 0, heightActual = 0, 
channelsActual = 0; + + if(0) + { + //Save bin data to file for debug + FILE *jpegout = fopen("debug.jpg","wb"); + fwrite(jpegBin.c_str(), 1, jpegBin.length(), jpegout); + fclose(jpegout); + } + + int jpegOk = ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), + &decodedBuff, + &decodedBuffSize, + &widthActual, &heightActual, &channelsActual); + + if (!jpegOk) + throw std::runtime_error("Error decoding jpeg"); + + if((widthActual == width && heightActual == height) || width == 0 || height == 0) + { + assert(channelsActual == 3); + *buffOut = decodedBuff; + *buffOutLen = decodedBuffSize; + width = widthActual; + height = heightActual; + } + else + { + delete [] decodedBuff; + throw std::runtime_error("Decoded jpeg has unexpected size"); + } + return 1; + } + + //YUYV to RGB24 + if(strcmp(inPxFmt,"YUYV")==0 && strcmp(targetPxFmt, "RGB24")==0) + { + int ret = ConvertYUYVtoRGB(data, dataLen, + width, height, + buffOut, + buffOutLen); + return ret; + } + + //RGB24 to I420 or YV12 + if(strcmp(inPxFmt,"RGB24")==0 && + (strcmp(targetPxFmt, "I420")==0 || strcmp(targetPxFmt, "YV12")==0)) + { + int ret = ConvertRgb24ToI420orYV12(data, dataLen, + width, height, + buffOut, buffOutLen, targetPxFmt); + return ret; + } + + //RGB24 to YUYV or UYVY + if(strcmp(inPxFmt,"RGB24")==0 && + (strcmp(targetPxFmt, "YUYV")==0 + || strcmp(targetPxFmt, "UYVY")==0) + ) + { + int ret = ConvertRGBtoYUYVorSimilar(data, dataLen, + width, height, targetPxFmt, + buffOut, buffOutLen); + return ret; + } + + //RGB24 -> BGR24 + if(strcmp(inPxFmt,"RGB24")==0 && strcmp(targetPxFmt, "BGR24")==0) + { + if(*buffOutLen != 0 && *buffOutLen != dataLen) + throw std::runtime_error("Output buffer has incorrect size"); + if(*buffOut == NULL) + *buffOut = new unsigned char[dataLen]; + *buffOutLen = dataLen; + for(unsigned i = 0; i+2 < dataLen; i+=3) + { + (*buffOut)[i+0] = data[i+2]; + (*buffOut)[i+1] = data[i+1]; + (*buffOut)[i+2] = data[i+0]; + } + return 1; + } + + //If no direct conversion to BGR24 is 
possible, convert to RGB24 + //as an intermediate step + if(strcmp(targetPxFmt, "BGR24")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = DecodeFrame(data, dataLen, + inPxFmt, + width, height, + "RGB24", + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = DecodeFrame(rgbBuff, rgbBuffLen, + "RBG24", + width, height, + targetPxFmt, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + + //Destination of RGB24INV, so convert to RGB24 first + if(strcmp(targetPxFmt, "RGB24INV")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = DecodeFrame(data, dataLen, + inPxFmt, + width, height, + "RGB24", + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = VerticalFlipRgb24(rgbBuff, rgbBuffLen, + width, height, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + + //Vertical flip of RGB24 + if(strcmp(inPxFmt, "RGB24INV")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = VerticalFlipRgb24(data, dataLen, + width, height, + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = DecodeFrame(rgbBuff, rgbBuffLen, + "RGB24", + width, height, + targetPxFmt, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + + /* + //Untested code + if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) + && strcmp(targetPxFmt, "RGB24")==0) + { + int uoff = 1; + int voff = 3; + if(strcmp(inPxFmt,"YUV2")==0) + { + uoff = 1; + voff = 3; + } + if(strcmp(inPxFmt,"YVU2")==0) + { + uoff = 3; + voff = 1; + } + + int stride = width * 4; + int hwidth = width/2; + for(int lineNum=0; lineNum < height; lineNum++) + { + int lineOffset = lineNum * stride; + int outOffset = lineNum * width * 3; + + for(int pxPairNum=0; pxPairNum < hwidth; pxPairNum++) + { + unsigned char Y1 = data[pxPairNum * 4 + lineOffset]; + unsigned char Cb = data[pxPairNum * 4 + lineOffset + uoff]; + unsigned char Y2 = data[pxPairNum * 4 + lineOffset + 
2]; + unsigned char Cr = data[pxPairNum * 4 + lineOffset + voff]; + + //ITU-R BT.601 colour conversion + double R1 = (Y1 + 1.402 * (Cr - 128)); + double G1 = (Y1 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B1 = (Y1 + 1.772 * (Cb - 128)); + double R2 = (Y2 + 1.402 * (Cr - 128)); + double G2 = (Y2 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B2 = (Y2 + 1.772 * (Cb - 128)); + + (*buffOut)[outOffset + pxPairNum * 6] = R1; + (*buffOut)[outOffset + pxPairNum * 6 + 1] = G1; + (*buffOut)[outOffset + pxPairNum * 6 + 2] = B1; + (*buffOut)[outOffset + pxPairNum * 6 + 3] = R2; + (*buffOut)[outOffset + pxPairNum * 6 + 4] = G2; + (*buffOut)[outOffset + pxPairNum * 6 + 5] = B2; + } + } + } + */ + + return 0; +} + +// ************* Resize Code ******************************* + +int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int heightOut, int invertVertical, int tupleLen) +{ + //Simple crop of image to target buffer + for(int x = 0; x < widthOut; x++) + { + for(int y = 0; y < heightOut; y++) + { + unsigned outOffset = x*tupleLen + (y*tupleLen*widthOut); + if(outOffset + tupleLen >= buffOutLen) continue; + unsigned char *outPx = &buffOut[outOffset]; + + //Scale position + double inx = (double)x * (double)widthIn / (double)widthOut; + double iny = (double)y * (double)heightIn / (double)heightOut; + + //Round to nearest pixel + int inxi = (int)(inx+0.5); + int inyi = (int)(iny+0.5); + + int row = inyi; + if(invertVertical) row = heightIn - inyi - 1; + unsigned inOffset = inxi*tupleLen + (row*tupleLen*widthIn); + if(inOffset + tupleLen >= dataLen) continue; + const unsigned char *inPx = &data[inOffset]; + + for(int c = 0; c < tupleLen; c++) + outPx[c] = inPx[c]; + } + + } + + return 1; +} + +int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int 
heightOut, int invertVertical, int tupleLen = 3) +{ + //Simple crop of image to target buffer + for(int x = 0; x < widthOut; x++) + { + for(int y = 0; y < heightOut; y++) + { + unsigned outOffset = x*tupleLen + (y*tupleLen*widthOut); + if(outOffset + tupleLen >= buffOutLen) continue; + unsigned char *outPx = &buffOut[outOffset]; + + int row = y; + if(invertVertical) row = heightIn - y - 1; + unsigned inOffset = x*tupleLen + (row*tupleLen*widthIn); + if(inOffset + tupleLen >= dataLen) continue; + const unsigned char *inPx = &data[inOffset]; + + for(int c = 0; c < tupleLen; c++) + outPx[c] = inPx[c]; + } + } + + return 1; +} + +int ResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *pxFmt, + int srcWidth, int srcHeight, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight) +{ + if(strcmp(pxFmt,"RGB24")==0 || strcmp(pxFmt,"BGR24")==0) + { + //Allocate new buffer if needed + int dstBuffSize = 3 * dstWidth * dstHeight; + if(*buffOutLen != 0 && *buffOutLen != dstBuffSize) + throw std::runtime_error("Output buffer has incorrect size"); + *buffOutLen = dstBuffSize; + if(*buffOut == NULL) + *buffOut = new unsigned char [*buffOutLen]; + + return ResizeRgb24ImageNN(data, dataLen, + srcWidth, srcHeight, + *buffOut, + *buffOutLen, + dstWidth, dstHeight, 0, 3); + } + //Not supported + return 0; +} + +/// ************************************************************** + +int VerticalFlipRgb24(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + //RGB24 -> RGB24INV + //RGB24INV -> RGB24 + + if(dataLen != width * height * 3) + throw std::runtime_error("Input buffer has incorrect size"); + if(*buffOutLen != 0 && *buffOutLen != dataLen) + throw std::runtime_error("Output buffer has incorrect size"); + if(*buffOut == NULL) + *buffOut = new unsigned char[dataLen]; + *buffOutLen = dataLen; + + for(int y = 0; y < height; y++) + { + int invy = height - y - 1; + const 
unsigned char *inRow = &im[y * width * 3]; + unsigned char *outRow = &((*buffOut)[invy * width * 3]); + memcpy(outRow, inRow, width * 3); + } + return 1; +} + +// ****** Combined resize and convert ************************************************* + + +int DecodeAndResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *inPxFmt, + int srcWidth, int srcHeight, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen, + int &dstWidth, + int &dstHeight) +{ + const unsigned char *currentImg = data; + int decallocateWhenDone = 0; + unsigned currentLen = dataLen; + std::string currentPxFmt = inPxFmt; + int currentWidth = srcWidth; + int currentHeight = srcHeight; + unsigned char *tmpBuff = NULL; + unsigned tmpBuffLen = 0; + + if(currentWidth==0 || currentHeight==0) + { + //Source has unknown dimensions + int ret = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + targetPxFmt, + &tmpBuff, + &tmpBuffLen); + + //Free intermediate buff + //probably not needed at this stage but good consistency + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + currentImg = tmpBuff; + currentLen = tmpBuffLen; + currentPxFmt = targetPxFmt; + decallocateWhenDone = 1; + } + + if((currentWidth==dstWidth && currentHeight==dstHeight) || dstWidth == 0 || dstHeight == 0) + { + //Resize is not required + int ret = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + targetPxFmt, + buffOut, + buffOutLen); + + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + dstWidth = currentWidth; + dstHeight = currentHeight; + + return ret; + } + + int resizeRet = ResizeFrame(currentImg, + currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + &tmpBuff, + &tmpBuffLen, + dstWidth, + dstHeight); + + if(resizeRet > 0) + 
{ + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + //Resize succeeded + currentImg = tmpBuff; + decallocateWhenDone = 1; + currentLen = tmpBuffLen; + currentWidth = dstWidth; + currentHeight = dstHeight; + + int decodeRet = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + targetPxFmt, + buffOut, + buffOutLen); + + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + return decodeRet; + } + + //Attempt to convert pixel format first, do resize later + tmpBuff = NULL; + tmpBuffLen = 0; + int decodeRet = DecodeFrame(data, dataLen, + inPxFmt, + srcWidth, srcHeight, + targetPxFmt, + &tmpBuff, + &tmpBuffLen); + + if(decodeRet <= 0) + { + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + return 0; //Conversion failed + } + + //Now resize + resizeRet = ResizeFrame(tmpBuff, + tmpBuffLen, + targetPxFmt, + srcWidth, srcHeight, + buffOut, + buffOutLen, + dstWidth, + dstHeight); + + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + return resizeRet; +} diff --git a/pixfmt.h b/pixfmt.h new file mode 100644 index 0000000..e7275b3 --- /dev/null +++ b/pixfmt.h @@ -0,0 +1,47 @@ + +#ifndef _PIXFMT_H_ +#define _PIXFMT_H_ + +#include + +int DecodeFrame(const unsigned char *data, unsigned dataLen, + const char *inPxFmt, + int &width, int &height, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen); + +int DecodeAndResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *inPxFmt, + int srcWidth, int srcHeight, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen, + int &dstWidth, + int &dstHeight); 
+ +int ResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *pxFmt, + int srcWidth, int srcHeight, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight); + +int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int heightOut, int invertVertical = 0, int tupleLen = 3); + +int VerticalFlipRgb24(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen); + +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); + +#endif //_PIXFMT_H_ + diff --git a/setup.py b/setup.py index 8f08f8d..c90adfe 100755 --- a/setup.py +++ b/setup.py @@ -2,27 +2,58 @@ # # python-v4l2capture # -# 2009, 2010, 2011 Fredrik Portstrom +# python-v4l2capture +# Python extension to capture video with video4linux2 # -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. 
+# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +# 2011, Joakim Gebart +# 2013, Tim Sheerman-Chase +# See README for license + +#SET VS90COMNTOOLS=%VS100COMNTOOLS% +#python setup.py build -c msvc +#python setup.py install from distutils.core import Extension, setup +import os + +debug = 0 + +if os.name == "nt": + if debug: + c_args=['/Zi', '/EHsc'] + l_args=["/MANIFEST", "/DEBUG"] + else: + c_args=[] + l_args=["/MANIFEST"] + + videolive = Extension("videolive", ["mfvideooutfile.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "namedpipeout.cpp", + "videooutfile.cpp"], + define_macros=[('_'+os.name.upper(), None)], + library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], + include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], + extra_compile_args=c_args, + extra_link_args=l_args, + libraries = ["pthreadVC2", "jpeg", "Mfplat", "Mf", "Mfreadwrite", "Ole32", "mfuuid", "Shlwapi"]) + +else: + videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", + "videooutfile.cpp"], + define_macros=[('_'+os.name.upper(), None)], + libraries = ["v4l2", "pthread", "jpeg"]) + setup( - name = "v4l2capture", - version = "1.4", - author = "Fredrik Portstrom", - author_email = "fredrik@jemla.se", - url = "http://fredrik.jemla.eu/v4l2capture", - description = "Capture video with video4linux2", - long_description = "python-v4l2capture is a slim and easy to use Python " - "extension for capturing video with video4linux2.", - license = "Public Domain", + name = "videolive", + version = "1.0", + author = "Tim Sheerman-Chase", + author_email = "info@kinatomic", + url = "http://www.kinatomic.com", + description = "Capture and stream video", + long_description = "Capture and stream video in python", + license = "GPL v2 or later", classifiers = [ - "License :: Public Domain", - "Programming 
Language :: C"], - ext_modules = [ - Extension("v4l2capture", ["v4l2capture.c"], libraries = ["v4l2"])]) + "License :: GPL", + "Programming Language :: C++"], + ext_modules = [videolive] + ) + diff --git a/v4l2capture.c b/v4l2capture.c deleted file mode 100644 index dabd38c..0000000 --- a/v4l2capture.c +++ /dev/null @@ -1,549 +0,0 @@ -// python-v4l2capture -// Python extension to capture video with video4linux2 -// -// 2009, 2010, 2011 Fredrik Portstrom -// -// I, the copyright holder of this file, hereby release it into the -// public domain. This applies worldwide. In case this is not legally -// possible: I grant anyone the right to use this work for any -// purpose, without any conditions, unless such conditions are -// required by law. - -#define USE_LIBV4L - -#include -#include -#include -#include - -#ifdef USE_LIBV4L -#include -#else -#include -#define v4l2_close close -#define v4l2_ioctl ioctl -#define v4l2_mmap mmap -#define v4l2_munmap munmap -#define v4l2_open open -#endif - -#define ASSERT_OPEN if(self->fd < 0) \ - { \ - PyErr_SetString(PyExc_ValueError, \ - "I/O operation on closed file"); \ - return NULL; \ - } - -struct buffer { - void *start; - size_t length; -}; - -typedef struct { - PyObject_HEAD - int fd; - struct buffer *buffers; - int buffer_count; -} Video_device; - -struct capability { - int id; - const char *name; -}; - -static struct capability capabilities[] = { - { V4L2_CAP_ASYNCIO, "asyncio" }, - { V4L2_CAP_AUDIO, "audio" }, - { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, - { V4L2_CAP_RADIO, "radio" }, - { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, - { V4L2_CAP_READWRITE, "readwrite" }, - { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, - { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, - { V4L2_CAP_STREAMING, "streaming" }, - { V4L2_CAP_TUNER, "tuner" }, - { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, - { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, - { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, - { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, - { 
V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, - { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } -}; - -static int my_ioctl(int fd, int request, void *arg) -{ - // Retry ioctl until it returns without being interrupted. - - for(;;) - { - int result = v4l2_ioctl(fd, request, arg); - - if(!result) - { - return 0; - } - - if(errno != EINTR) - { - PyErr_SetFromErrno(PyExc_IOError); - return 1; - } - } -} - -static void Video_device_unmap(Video_device *self) -{ - int i; - - for(i = 0; i < self->buffer_count; i++) - { - v4l2_munmap(self->buffers[i].start, self->buffers[i].length); - } -} - -static void Video_device_dealloc(Video_device *self) -{ - if(self->fd >= 0) - { - if(self->buffers) - { - Video_device_unmap(self); - } - - v4l2_close(self->fd); - } - - self->ob_type->tp_free((PyObject *)self); -} - -static int Video_device_init(Video_device *self, PyObject *args, - PyObject *kwargs) -{ - const char *device_path; - - if(!PyArg_ParseTuple(args, "s", &device_path)) - { - return -1; - } - - int fd = v4l2_open(device_path, O_RDWR | O_NONBLOCK); - - if(fd < 0) - { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, (char *)device_path); - return -1; - } - - self->fd = fd; - self->buffers = NULL; - return 0; -} - -static PyObject *Video_device_close(Video_device *self) -{ - if(self->fd >= 0) - { - if(self->buffers) - { - Video_device_unmap(self); - } - - v4l2_close(self->fd); - self->fd = -1; - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_fileno(Video_device *self) -{ - ASSERT_OPEN; - return PyInt_FromLong(self->fd); -} - -static PyObject *Video_device_get_info(Video_device *self) -{ - ASSERT_OPEN; - struct v4l2_capability caps; - - if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) - { - return NULL; - } - - PyObject *set = PySet_New(NULL); - - if(!set) - { - return NULL; - } - - struct capability *capability = capabilities; - - while((void *)capability < (void *)capabilities + sizeof(capabilities)) - { - if(caps.capabilities & capability->id) - { - PyObject *s 
= PyString_FromString(capability->name); - - if(!s) - { - Py_DECREF(set); - return NULL; - } - - PySet_Add(set, s); - } - - capability++; - } - - return Py_BuildValue("sssO", caps.driver, caps.card, caps.bus_info, set); -} - -static PyObject *Video_device_set_format(Video_device *self, PyObject *args) -{ - int size_x; - int size_y; - int yuv420 = 0; - - if(!PyArg_ParseTuple(args, "ii|i", &size_x, &size_y, &yuv420)) - { - return NULL; - } - - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - format.fmt.pix.width = size_x; - format.fmt.pix.height = size_y; -#ifdef USE_LIBV4L - format.fmt.pix.pixelformat = - yuv420 ? V4L2_PIX_FMT_YUV420 : V4L2_PIX_FMT_RGB24; -#else - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; -#endif - format.fmt.pix.field = V4L2_FIELD_INTERLACED; - format.fmt.pix.bytesperline = 0; - - if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) - { - return NULL; - } - - return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); -} - -static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) -{ - int fps; - if(!PyArg_ParseTuple(args, "i", &fps)) - { - return NULL; - } - struct v4l2_streamparm setfps; - memset(&setfps, 0, sizeof(struct v4l2_streamparm)); - setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - setfps.parm.capture.timeperframe.numerator = 1; - setfps.parm.capture.timeperframe.denominator = fps; - if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps)){ - return NULL; - } - return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); -} - -static PyObject *Video_device_start(Video_device *self) -{ - ASSERT_OPEN; - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl(self->fd, VIDIOC_STREAMON, &type)) - { - return NULL; - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_stop(Video_device *self) -{ - ASSERT_OPEN; - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &type)) - { - return NULL; - } - - 
Py_RETURN_NONE; -} - -static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) -{ - int buffer_count; - - if(!PyArg_ParseTuple(args, "I", &buffer_count)) - { - return NULL; - } - - ASSERT_OPEN; - - if(self->buffers) - { - PyErr_SetString(PyExc_ValueError, "Buffers are already created"); - return NULL; - } - - struct v4l2_requestbuffers reqbuf; - reqbuf.count = buffer_count; - reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - reqbuf.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) - { - return NULL; - } - - if(!reqbuf.count) - { - PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - return NULL; - } - - self->buffers = malloc(reqbuf.count * sizeof(struct buffer)); - - if(!self->buffers) - { - PyErr_NoMemory(); - return NULL; - } - - int i; - - for(i = 0; i < reqbuf.count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) - { - return NULL; - } - - self->buffers[i].length = buffer.length; - self->buffers[i].start = v4l2_mmap(NULL, buffer.length, - PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset); - - if(self->buffers[i].start == MAP_FAILED) - { - PyErr_SetFromErrno(PyExc_IOError); - return NULL; - } - } - - self->buffer_count = i; - Py_RETURN_NONE; -} - -static PyObject *Video_device_queue_all_buffers(Video_device *self) -{ - if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; - } - - int i; - int buffer_count = self->buffer_count; - - for(i = 0; i < buffer_count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) - { - return NULL; - } - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_read_internal(Video_device *self, int queue) -{ - 
if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; - } - - struct v4l2_buffer buffer; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) - { - return NULL; - } - -#ifdef USE_LIBV4L - PyObject *result = PyString_FromStringAndSize( - self->buffers[buffer.index].start, buffer.bytesused); - - if(!result) - { - return NULL; - } -#else - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); - - if(!result) - { - return NULL; - } - - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; - -#define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } -#undef CLAMP -#endif - - if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) - { - return NULL; - } - - return result; -} - -static PyObject *Video_device_read(Video_device *self) -{ - return Video_device_read_internal(self, 0); -} - -static PyObject *Video_device_read_and_queue(Video_device *self) -{ - return Video_device_read_internal(self, 1); -} - -static PyMethodDef Video_device_methods[] = { - {"close", (PyCFunction)Video_device_close, METH_NOARGS, - "close()\n\n" - "Close video device. 
Subsequent calls to other methods will fail."}, - {"fileno", (PyCFunction)Video_device_fileno, METH_NOARGS, - "fileno() -> integer \"file descriptor\".\n\n" - "This enables video devices to be passed select.select for waiting " - "until a frame is available for reading."}, - {"get_info", (PyCFunction)Video_device_get_info, METH_NOARGS, - "get_info() -> driver, card, bus_info, capabilities\n\n" - "Returns three strings with information about the video device, and one " - "set containing strings identifying the capabilities of the video " - "device."}, - {"set_format", (PyCFunction)Video_device_set_format, METH_VARARGS, - "set_format(size_x, size_y, yuv420 = 0) -> size_x, size_y\n\n" - "Request the video device to set image size and format. The device may " - "choose another size than requested and will return its choice. The " - "image format will be RGB24 if yuv420 is false (default) or YUV420 if " - "yuv420 is true."}, - {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, - "set_fps(fps) -> fps \n\n" - "Request the video device to set frame per seconds.The device may " - "choose another frame rate than requested and will return its choice. " }, - {"start", (PyCFunction)Video_device_start, METH_NOARGS, - "start()\n\n" - "Start video capture."}, - {"stop", (PyCFunction)Video_device_stop, METH_NOARGS, - "stop()\n\n" - "Stop video capture."}, - {"create_buffers", (PyCFunction)Video_device_create_buffers, METH_VARARGS, - "create_buffers(count)\n\n" - "Create buffers used for capturing image data. Can only be called once " - "for each video device object."}, - {"queue_all_buffers", (PyCFunction)Video_device_queue_all_buffers, - METH_NOARGS, - "queue_all_buffers()\n\n" - "Let the video device fill all buffers created."}, - {"read", (PyCFunction)Video_device_read, METH_NOARGS, - "read() -> string\n\n" - "Reads image data from a buffer that has been filled by the video " - "device. The image data is in RGB och YUV420 format as decided by " - "'set_format'. 
The buffer is removed from the queue. Fails if no buffer " - "is filled. Use select.select to check for filled buffers."}, - {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_NOARGS, - "read_and_queue()\n\n" - "Same as 'read', but adds the buffer back to the queue so the video " - "device can fill it again."}, - {NULL} -}; - -static PyTypeObject Video_device_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_device", sizeof(Video_device), 0, - (destructor)Video_device_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Video_device_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Video_device_init -}; - -static PyMethodDef module_methods[] = { - {NULL} -}; - -PyMODINIT_FUNC initv4l2capture(void) -{ - Video_device_type.tp_new = PyType_GenericNew; - - if(PyType_Ready(&Video_device_type) < 0) - { - return; - } - - PyObject *module = Py_InitModule3("v4l2capture", module_methods, - "Capture video with video4linux2."); - - if(!module) - { - return; - } - - Py_INCREF(&Video_device_type); - PyModule_AddObject(module, "Video_device", (PyObject *)&Video_device_type); -} diff --git a/v4l2capture.cpp b/v4l2capture.cpp new file mode 100644 index 0000000..d2539c9 --- /dev/null +++ b/v4l2capture.cpp @@ -0,0 +1,747 @@ + +#include "v4l2capture.h" + +#define USE_LIBV4L + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "pixfmt.h" + +#ifdef USE_LIBV4L +#include +#else +#include +#define v4l2_close close +#define v4l2_ioctl ioctl +#define v4l2_mmap mmap +#define v4l2_munmap munmap +#define v4l2_open open +#endif + +#define ASSERT_OPEN if(self->fd < 0) \ + { \ + PyErr_SetString(PyExc_ValueError, \ + "I/O operation on closed file"); \ + Py_RETURN_NONE; \ 
+ } + +int my_ioctl(int fd, int request, void *arg, int utimeout = -1) +{ + // Retry ioctl until it returns without being interrupted. + + for(;;) + { + // Wait for frame until time out + if(utimeout >= 0) + { + + fd_set fds; + FD_ZERO (&fds); + FD_SET (fd, &fds); + + struct timeval tv; + tv.tv_sec = 0; + tv.tv_usec = utimeout; + int r = select(fd+1, &fds, NULL, NULL, &tv); + + if(r == 0) + { + return 1; //Timed out + } + } + + //printf("call\n"); + int result = v4l2_ioctl(fd, request, arg); + + if(!result) + { + //printf("ret\n"); + return 0; + } + + if(errno == EAGAIN) + { + //printf("ret\n"); + return 1; + } + + if(errno != EINTR) + { + return 1; + } + usleep(1000); + } +} + +std::wstring CharArrayToWString(const char *in) +{ + size_t inLen = strlen(in)+1; + wchar_t *tmpDevName = new wchar_t[inLen]; + //size_t returnValue; + mbstowcs(tmpDevName, in, inLen); + //mbstowcs_s(&returnValue, tmpDevName, inLen, in, inLen); + std::wstring tmpDevName2(tmpDevName); + delete [] tmpDevName; + return tmpDevName2; +} + +static void enumerate_menu (int fd, struct v4l2_queryctrl &queryctrl) +{ + struct v4l2_querymenu querymenu; + std::cout << " Menu items:" << std::endl; + + memset (&querymenu, 0, sizeof (querymenu)); + querymenu.id = queryctrl.id; + + for (querymenu.index = queryctrl.minimum; + querymenu.index <= queryctrl.maximum; + querymenu.index++) { + if (0 == my_ioctl (fd, VIDIOC_QUERYMENU, &querymenu)) { + std::cout << " " << querymenu.index << " " << querymenu.name << std::endl; + } else { + std::cout << " Error VIDIOC_QUERYMENU" << std::endl; + } + } +} + +// ************************************************************************** + +Video_in_Manager::Video_in_Manager(const char *devNameIn) : Base_Video_In() +{ + stop = 0; + stopped = 1; + deviceStarted = 0; + this->devName = devNameIn; + pthread_mutex_init(&lock, NULL); + buffer_counts = 10; + buffers = NULL; + stopDeviceFlag = 0; + closeDeviceFlag = 0; + frameWidth = 0; + frameHeight = 0; + decodedFrameBuffMaxSize 
= 10; + verbose = 0; + targetFmt = "RGB24"; +} + +Video_in_Manager::~Video_in_Manager() +{ + if(deviceStarted) + { + this->StopDeviceInternal(); + } + + if(fd!=-1) + { + this->CloseDeviceInternal(); + } + + if(buffers) delete [] buffers; + this->buffers = NULL; + + for(unsigned int i=0; idecodedFrameBuff[i]; + } + this->decodedFrameBuff.clear(); + + pthread_mutex_destroy(&lock); +} + +void Video_in_Manager::Stop() +{ + pthread_mutex_lock(&this->lock); + this->stop = 1; + pthread_mutex_unlock(&this->lock); +} + +void Video_in_Manager::WaitForStop() +{ + while(1) + { + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); + + if(s) return; + usleep(10000); + } +} + +void Video_in_Manager::OpenDevice() +{ + pthread_mutex_lock(&this->lock); + this->openDeviceFlag.push_back(this->devName.c_str()); + pthread_mutex_unlock(&this->lock); +} + +void Video_in_Manager::SetFormat(const char *fmt, int width, int height) +{ + class SetFormatParams params; + params.fmt = fmt; + params.width = width; + params.height = height; + + pthread_mutex_lock(&this->lock); + this->setFormatFlags.push_back(params); + pthread_mutex_unlock(&this->lock); +} + +void Video_in_Manager::StartDevice(int buffer_count) +{ + pthread_mutex_lock(&this->lock); + this->startDeviceFlag.push_back(buffer_count); + pthread_mutex_unlock(&this->lock); +} + +void Video_in_Manager::StopDevice() +{ + pthread_mutex_lock(&this->lock); + this->stopDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); +} + +void Video_in_Manager::CloseDevice() +{ + pthread_mutex_lock(&this->lock); + this->closeDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); +} + +int Video_in_Manager::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) +{ + pthread_mutex_lock(&this->lock); + if(this->decodedFrameBuff.size()==0) + { + //No frame found + *buffOut = NULL; + metaOut = NULL; + pthread_mutex_unlock(&this->lock); + return 0; + } + + //Return frame + *buffOut = this->decodedFrameBuff[0]; + 
*metaOut = this->decodedFrameMetaBuff[0]; + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); + pthread_mutex_unlock(&this->lock); + return 1; +} + +int Video_in_Manager::ReadFrame() +{ + if(this->fd<0) + throw std::runtime_error("File not open"); + + if(this->buffers == NULL) + throw std::runtime_error("Buffers have not been created"); + + struct v4l2_buffer buffer; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) + { + return 0; + } + + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, + this->pxFmt.c_str(), + this->frameWidth, + this->frameHeight, + this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen); + + //Return a frame, decoded or not + pthread_mutex_lock(&this->lock); + + class FrameMetaData meta; + meta.width = this->frameWidth; + meta.height = this->frameHeight; + if(ok && rgbBuff != NULL) + { + meta.fmt = this->targetFmt; + meta.buffLen = rgbBuffLen; + this->decodedFrameBuff.push_back(rgbBuff); + } + else + { + //Make a copy of un-decodable buffer to return + unsigned char* buffOut = new unsigned char[buffer.bytesused]; + memcpy(buffOut, this->buffers[buffer.index].start, buffer.bytesused); + meta.fmt = this->pxFmt; + meta.buffLen = buffer.bytesused; + this->decodedFrameBuff.push_back(buffOut); + } + meta.sequence = buffer.sequence; + meta.tv_sec = buffer.timestamp.tv_sec; + meta.tv_usec = buffer.timestamp.tv_usec; + + this->decodedFrameMetaBuff.push_back(meta); + while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) + { + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); + } + pthread_mutex_unlock(&this->lock); + + //Queue buffer for next frame + if(my_ioctl(this->fd, VIDIOC_QBUF, 
&buffer)) + { + throw std::runtime_error("VIDIOC_QBUF failed"); + } + + return 1; +} + +int Video_in_Manager::OpenDeviceInternal() +{ + if(verbose) printf("OpenDeviceInternal\n"); + //Open the video device. + this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK); + + if(fd < 0) + { + throw std::runtime_error("Error opening device"); + } + + this->deviceStarted = 0; + if(verbose) printf("Done opening\n"); + return 1; +} + +void Video_in_Manager::Test() +{ + /*struct v4l2_streamparm streamparm; + memset (&streamparm, 0, sizeof (streamparm)); + streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + //Check if camera supports timeperframe + if(my_ioctl(this->fd, VIDIOC_G_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_G_PARM failed"); + } + int timePerFrameSupported = (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) != 0; + if(timePerFrameSupported) + { + + //Enurate framerates + //struct v4l2_frmivalenum frmrates; + //memset (&frmrates, 0, sizeof (v4l2_frmivalenum)); + //my_ioctl(this->fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmrates); + //std::cout << "fr " << frmrates.discrete.numerator << "," << frmrates.discrete.denominator << std::endl; + + //Set frame rate + struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe; + tpf->numerator = 1; + tpf->denominator = 30; + if(my_ioctl(this->fd, VIDIOC_S_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_S_PARM failed"); + } + + } + + //Query controls + struct v4l2_queryctrl queryctrl; + queryctrl.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (this->fd, VIDIOC_QUERYCTRL, &queryctrl); + if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)) + { + std::cout << "Control "<fd, queryctrl); + } +*/ +/* //Read control + struct v4l2_control control; + memset (&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (fd, VIDIOC_QUERYCTRL, &control); + std::cout << "val1 " << control.value << std::endl;*/ +/* + //Set control + memset (&control, 0, sizeof (control)); + control.id = 
V4L2_CID_EXPOSURE_AUTO; + control.value = V4L2_EXPOSURE_MANUAL; + std::cout << "ret " << my_ioctl (fd, VIDIOC_S_CTRL, &control) << std::endl; + + //Confirm value + memset (&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (fd, VIDIOC_QUERYCTRL, &control); + std::cout << "val2 " << control.value << std::endl;*/ +} + +int Video_in_Manager::SetFormatInternal(class SetFormatParams &args) +{ + if(verbose) printf("SetFormatInternal\n"); + //int size_x, int size_y, const char *fmt; + + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + format.fmt.pix.width = args.width; + format.fmt.pix.height = args.height; + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + + if(strcmp(args.fmt.c_str(), "MJPEG")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + if(strcmp(args.fmt.c_str(), "RGB24")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(strcmp(args.fmt.c_str(), "YUV420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + if(strcmp(args.fmt.c_str(), "YVU420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; + if(strcmp(args.fmt.c_str(), "YUYV")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + + format.fmt.pix.field = V4L2_FIELD_NONE; + format.fmt.pix.bytesperline = 0; + + if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) + { + return 0; + } + + //Store pixel format for decoding usage later + //this->pxFmt = args.fmt; + //this->frameWidth = args.width; + //this->frameHeight = args.height; + this->GetFormatInternal(); + + return 1; +} + +int Video_in_Manager::GetFormatInternal() +{ + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(my_ioctl(this->fd, VIDIOC_G_FMT, &format)) + { + return 0; + } + + this->frameWidth = format.fmt.pix.width; + this->frameHeight = format.fmt.pix.height; + + switch(format.fmt.pix.pixelformat) + { + case V4L2_PIX_FMT_MJPEG: + this->pxFmt = "MJPEG"; + break; + case V4L2_PIX_FMT_RGB24: + this->pxFmt = "RGB24"; + break; + case 
V4L2_PIX_FMT_YUV420: + this->pxFmt = "YUV420"; + break; + case V4L2_PIX_FMT_YVU420: + this->pxFmt = "YVU420"; + break; + case V4L2_PIX_FMT_YUYV: + this->pxFmt = "YUYV"; + break; + default: + this->pxFmt = "Unknown "; + std::ostringstream oss; + oss << format.fmt.pix.pixelformat; + this->pxFmt.append(oss.str()); + + break; + } + + if(verbose) printf("Current format %s %i %i\n", this->pxFmt.c_str(), this->frameWidth, this->frameHeight); + return 1; +} + +int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) +{ + if(verbose) printf("StartDeviceInternal\n"); + //Check this device has not already been started + if(this->fd==-1) + { + throw std::runtime_error("Device not open"); + } + + //Set other parameters for capture + //TODO + + /* + //Query current pixel format + self.size_x, self.size_y, self.pixelFmt = self.video.get_format() + + //Set target frames per second + self.fps = self.video.set_fps(reqFps) + */ + + // Create a buffer to store image data in. This must be done before + // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise + // raises IOError. 
+ + if(this->pxFmt.length()==0) + { + //Get current pixel format + //TODO + int ret = GetFormatInternal(); + if(!ret) throw std::runtime_error("Could not determine image format"); + } + + struct v4l2_requestbuffers reqbuf; + reqbuf.count = buffer_count; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbuf.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) + { + throw std::runtime_error("VIDIOC_REQBUFS failed"); + } + + if(!reqbuf.count) + { + throw std::runtime_error("Not enough buffer memory"); + } + + this->buffers = new struct buffer [reqbuf.count]; + + if(this->buffers == NULL) + { + throw std::runtime_error("Failed to allocate buffer memory"); + } + + for(unsigned int i = 0; i < reqbuf.count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) + { + throw std::runtime_error("VIDIOC_QUERYBUF failed"); + } + + this->buffers[i].length = buffer.length; + this->buffers[i].start = v4l2_mmap(NULL, buffer.length, + PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); + + if(this->buffers[i].start == MAP_FAILED) + { + throw std::runtime_error("v4l2_mmap failed"); + } + } + + this->buffer_counts = reqbuf.count; + + // Send the buffer to the device. Some devices require this to be done + // before calling 'start'. + + for(int i = 0; i < buffer_count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) + { + //This may fail with some devices but does not seem to be harmful. + } + } + + // Start the device. This lights the LED if it's a camera that has one. 
+ enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(fd, VIDIOC_STREAMON, &type)) + { + throw std::runtime_error("VIDIOC_STREAMON failed"); + } + + this->Test(); + + this->deviceStarted = 1; + if(verbose) printf("Started ok\n"); + return 1; +} + +void Video_in_Manager::StopDeviceInternal() +{ + if(verbose) printf("StopDeviceInternal\n"); + if(this->fd==-1) + { + throw std::runtime_error("Device not started"); + } + + //Signal V4l2 api + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &type)) + { + throw std::runtime_error("VIDIOC_STREAMOFF failed"); + } + + this->deviceStarted = 0; +} + +int Video_in_Manager::CloseDeviceInternal() +{ + if(verbose) printf("CloseDeviceInternal\n"); + if(this->fd == -1) + { + throw std::runtime_error("Device not open"); + } + + if(this->deviceStarted) + StopDeviceInternal(); + + if(this->buffers!= NULL) + { + for(int i = 0; i < this->buffer_counts; i++) + { + v4l2_munmap(this->buffers[i].start, this->buffers[i].length); + } + delete [] this->buffers; + } + this->buffers = NULL; + + //Release memory + v4l2_close(fd); + fd = -1; + return 1; +} + +void Video_in_Manager::Run() +{ + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); + + try + { + while(running) + { + //printf("Sleep\n"); + usleep(1000); + + if(deviceStarted) this->ReadFrame(); + + pthread_mutex_lock(&this->lock); + try + { + + if(this->openDeviceFlag.size() > 0) + { + std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; + this->openDeviceFlag.pop_back(); + this->OpenDeviceInternal(); + } + + if(this->setFormatFlags.size() > 0 + && this->openDeviceFlag.size() == 0) + { + class SetFormatParams params = this->setFormatFlags[this->setFormatFlags.size()-1]; + this->setFormatFlags.pop_back(); + this->SetFormatInternal(params); + } + + 
if(this->startDeviceFlag.size() > 0 + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0) + { + int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; + this->startDeviceFlag.pop_back(); + this->StartDeviceInternal(buffer_count); + } + + if(this->stopDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0 + && this->startDeviceFlag.size() == 0) + { + this->StopDeviceInternal(); + this->stopDeviceFlag = 0; + } + + if(this->closeDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0 + && this->startDeviceFlag.size() == 0 + && !this->stopDeviceFlag) + { + this->CloseDeviceInternal(); + this->closeDeviceFlag = 0; + } + + running = !this->stop; + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; + } + pthread_mutex_unlock(&this->lock); + } + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + } + + if(verbose) printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); +} + +void *Video_in_Worker_thread(void *arg) +{ + class Video_in_Manager *argobj = (class Video_in_Manager*) arg; + argobj->Run(); + + return NULL; +} + +std::vector > List_in_devices() +{ + std::vector > out; + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + return out; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + std::vector row; + tmp.append(dirp->d_name); + + std::wstring tmpDevName = CharArrayToWString(tmp.c_str()); + row.push_back(tmpDevName); + out.push_back(row); + } + closedir(dp); + return out; +} + + + diff --git a/v4l2capture.h b/v4l2capture.h new file mode 100644 index 0000000..3f35477 --- /dev/null +++ b/v4l2capture.h @@ 
-0,0 +1,134 @@ +// python-v4l2capture +// Python extension to capture video with video4linux2 +// +// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +// 2011, Joakim Gebart +// 2013, Tim Sheerman-Chase +// See README for license + +#ifndef V4L2CAPTURE_H +#define V4L2CAPTURE_H + +#include +#include +#include +#include "base.h" + +struct buffer { + void *start; + size_t length; +}; + +struct capability { + int id; + const char *name; +}; + + +class SetFormatParams +{ +public: + std::string fmt; + int width, height; + + SetFormatParams() + { + width = 0; + height = 0; + } + + SetFormatParams(const SetFormatParams &in) + { + SetFormatParams::operator=(in); + } + + const SetFormatParams &operator=(const SetFormatParams &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + return *this; + } +}; + +/*static struct capability capabilities[] = { + { V4L2_CAP_ASYNCIO, "asyncio" }, + { V4L2_CAP_AUDIO, "audio" }, + { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, + { V4L2_CAP_RADIO, "radio" }, + { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, + { V4L2_CAP_READWRITE, "readwrite" }, + { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, + { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, + { V4L2_CAP_STREAMING, "streaming" }, + { V4L2_CAP_TUNER, "tuner" }, + { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, + { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, + { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, + { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, + { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, + { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } +};*/ + +int my_ioctl(int fd, int request, void *arg, int utimeout); + +class Video_in_Manager : public Base_Video_In +{ +public: + //Device_manager *self; + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + std::vector openDeviceFlag; + std::vector startDeviceFlag; + std::vector setFormatFlags; + int stopDeviceFlag; + int closeDeviceFlag; + int deviceStarted; + int fd; + struct buffer *buffers; + 
int frameWidth, frameHeight; + int buffer_counts; + std::string pxFmt; + int verbose; + std::string targetFmt; + + std::vector decodedFrameBuff; + std::vector decodedFrameMetaBuff; + unsigned decodedFrameBuffMaxSize; + + Video_in_Manager(const char *devNameIn); + virtual ~Video_in_Manager(); + void Stop(); + void WaitForStop(); + void OpenDevice(); + void SetFormat(const char *fmt, int width, int height); + void StartDevice(int buffer_count); + void StopDevice(); + void CloseDevice(); + int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + + void Test(); + +protected: + int ReadFrame(); + int OpenDeviceInternal(); + int SetFormatInternal(class SetFormatParams &args); + int GetFormatInternal(); + int StartDeviceInternal(int buffer_count); + void StopDeviceInternal(); + int CloseDeviceInternal(); + +public: + void Run(); +}; + +void *Video_in_Worker_thread(void *arg); + +std::vector > List_in_devices(); + +// ********************************************************************** + +#endif //V4L2CAPTURE_H + diff --git a/v4l2out.cpp b/v4l2out.cpp new file mode 100644 index 0000000..ee5e975 --- /dev/null +++ b/v4l2out.cpp @@ -0,0 +1,392 @@ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "v4l2out.h" +#include "pixfmt.h" + +#define ROUND_UP_2(num) (((num)+1)&~1) +#define ROUND_UP_4(num) (((num)+3)&~3) +#define ROUND_UP_8(num) (((num)+7)&~7) + +void print_format(struct v4l2_format*vid_format) { + printf(" vid_format->type =%d\n", vid_format->type ); + printf(" vid_format->fmt.pix.width =%d\n", vid_format->fmt.pix.width ); + printf(" vid_format->fmt.pix.height =%d\n", vid_format->fmt.pix.height ); + printf(" vid_format->fmt.pix.pixelformat =%d\n", vid_format->fmt.pix.pixelformat); + printf(" vid_format->fmt.pix.sizeimage =%d\n", vid_format->fmt.pix.sizeimage ); + printf(" vid_format->fmt.pix.field =%d\n", vid_format->fmt.pix.field ); 
+ printf(" vid_format->fmt.pix.bytesperline=%d\n", vid_format->fmt.pix.bytesperline ); + printf(" vid_format->fmt.pix.colorspace =%d\n", vid_format->fmt.pix.colorspace ); +} + +class SendFrameArgs +{ +public: + unsigned imgLen; + std::string pxFmt; + unsigned width; + unsigned height; + unsigned long tv_sec; + unsigned long tv_usec; + + SendFrameArgs() + { + imgLen = 0; + width = 0; + height = 0; + tv_sec = 0; + tv_usec = 0; + } + + SendFrameArgs(const SendFrameArgs &in) + { + SendFrameArgs::operator=(in); + } + + const SendFrameArgs &operator=(const SendFrameArgs &in) + { + width = in.width; + height = in.height; + imgLen = in.imgLen; + pxFmt = in.pxFmt; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; + return *this; + } +}; + +//******************************************************************* + +Video_out::Video_out(const char *devNameIn) : Base_Video_Out() +{ + this->fdwr = 0; + framesize = 0; + stop = 0; + stopped = 1; + verbose = 1; + this->devName = devNameIn; + pthread_mutex_init(&lock, NULL); + currentFrame = NULL; + outputWidth = 640; + outputHeight = 480; + outputPxFmt = "YUYV"; + + lastFrameTime_sec = 0; + lastFrameTime_usec = 0; + + struct sigevent sevp; + memset(&sevp, 0, sizeof(struct sigevent)); + sevp.sigev_notify = SIGEV_NONE; + +} + +Video_out::~Video_out() +{ + for(unsigned i=0; isendFrameBuffer.size(); i++) + { + delete [] this->sendFrameBuffer[i]; + } + this->sendFrameBuffer.clear(); + + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = NULL; + + pthread_mutex_destroy(&lock); +} + +void Video_out::SendFrameInternal() +{ + const char* buff = NULL; + class SendFrameArgs args; + + pthread_mutex_lock(&this->lock); + if(this->sendFrameBuffer.size()>=1) + { + //Get oldest frame + buff = this->sendFrameBuffer[0]; + args = this->sendFrameArgs[0]; + + //Remove frame from buffer + this->sendFrameBuffer.erase(this->sendFrameBuffer.begin()); + this->sendFrameArgs.erase(this->sendFrameArgs.begin()); + } + 
pthread_mutex_unlock(&this->lock); + + //Check time since previous frame send + long int secSinceLastFrame = args.tv_sec - this->lastFrameTime_sec; + long int nsecSinceLastFrame = args.tv_usec - this->lastFrameTime_usec; + if(nsecSinceLastFrame < 0) + { + secSinceLastFrame -= 1; + nsecSinceLastFrame *= -1; + } + + if(buff != NULL) + { + //Convert new frame to correct size and pixel format + assert(strcmp(args.pxFmt.c_str(), "RGB24")==0); + unsigned resizeBuffLen = this->outputWidth * this->outputHeight * 3; + char *buffResize = new char[resizeBuffLen]; + memset(buffResize, 0, resizeBuffLen); + for(unsigned x = 0; x < this->outputWidth; x++) + { + if (x >= args.width) continue; + for(unsigned y = 0; y < this->outputHeight; y++) + { + if (y >= args.height) continue; + buffResize[y * this->outputWidth * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; + buffResize[y * this->outputWidth * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; + buffResize[y * this->outputWidth * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; + } + } + + unsigned char *buffOut = NULL; + unsigned buffOutLen = 0; + DecodeFrame((unsigned char *)buffResize, resizeBuffLen, + args.pxFmt.c_str(), + this->outputWidth, this->outputHeight, + this->outputPxFmt.c_str(), + &buffOut, + &buffOutLen); + + assert(buffOutLen == this->framesize); + + //Replace current frame with new encoded frame + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = buffOut; + + delete [] buffResize; + + } + + //If we have no data, initialise with a blank frame + if(this->currentFrame==NULL) + { + this->currentFrame = new unsigned char[this->framesize]; + memset(this->currentFrame, 0, this->framesize); + } + + int timeElapsed = secSinceLastFrame>=1; + + if(timeElapsed || buff != NULL) + { + //Send frame update due to time elapse + if(timeElapsed) + printf("Write frame due to elapse time\n"); + write(this->fdwr, this->currentFrame, this->framesize); + + this->lastFrameTime_sec = 
args.tv_sec; + this->lastFrameTime_usec = args.tv_usec; + } + + //Free image buffer + if(buff!=NULL) + delete [] buff; +} + +void Video_out::Run() +{ + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); + + this->fdwr = open(this->devName.c_str(), O_RDWR); + assert(fdwr >= 0); + + struct v4l2_capability vid_caps; + int ret_code = ioctl(this->fdwr, VIDIOC_QUERYCAP, &vid_caps); + assert(ret_code != -1); + + struct v4l2_format vid_format; + memset(&vid_format, 0, sizeof(vid_format)); + + ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); + if(verbose)print_format(&vid_format); + + int lw = 0; + int fw = 0; + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) + { + lw = this->outputWidth; /* ??? */ + fw = ROUND_UP_4 (this->outputWidth) * ROUND_UP_2 (this->outputHeight); + fw += 2 * ((ROUND_UP_8 (this->outputWidth) / 2) * (ROUND_UP_2 (this->outputHeight) / 2)); + } + + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0 + || strcmp(this->outputPxFmt.c_str(), "UYVY")==0 ) + { + lw = (ROUND_UP_2 (this->outputWidth) * 2); + fw = lw * this->outputHeight; + } + + vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + vid_format.fmt.pix.width = this->outputWidth; + vid_format.fmt.pix.height = this->outputHeight; + vid_format.fmt.pix.pixelformat = 0; + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + if(strcmp(this->outputPxFmt.c_str(), "UYVY")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; + if(strcmp(this->outputPxFmt.c_str(), "RGB24")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + + vid_format.fmt.pix.sizeimage = lw; + vid_format.fmt.pix.field = V4L2_FIELD_NONE; + vid_format.fmt.pix.bytesperline = fw; + vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; + + 
if(verbose)print_format(&vid_format); + + ret_code = ioctl(this->fdwr, VIDIOC_S_FMT, &vid_format); + + assert(ret_code != -1); + + this->framesize = vid_format.fmt.pix.sizeimage; + int linewidth = vid_format.fmt.pix.bytesperline; + if(verbose)printf("frame: format=%s\tsize=%d\n", this->outputPxFmt.c_str(), framesize); + + try + { + while(running) + { + usleep(1000); + + this->SendFrameInternal(); + + pthread_mutex_lock(&this->lock); + try + { + running = !this->stop; + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; + } + pthread_mutex_unlock(&this->lock); + } + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + } + + if(verbose) printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); +} + +void Video_out::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec, + unsigned long tv_usec) +{ + pthread_mutex_lock(&this->lock); + if(verbose) printf("SendFrame %i %s %i %i\n", imgLen, pxFmt, width, height); + + //Take a shallow copy of the buffer and keep for worker thread + char *buffCpy = new char[imgLen]; + memcpy(buffCpy, imgIn, imgLen); + this->sendFrameBuffer.push_back(buffCpy); + + class SendFrameArgs sendFrameArgsTmp; + sendFrameArgsTmp.imgLen = imgLen; + sendFrameArgsTmp.pxFmt = pxFmt; + sendFrameArgsTmp.width = width; + sendFrameArgsTmp.tv_sec = tv_sec; + sendFrameArgsTmp.tv_usec = tv_usec; + this->sendFrameArgs.push_back(sendFrameArgsTmp); + + pthread_mutex_unlock(&this->lock); +} + +void Video_out::Stop() +{ + pthread_mutex_lock(&this->lock); + this->stop = 1; + pthread_mutex_unlock(&this->lock); +} + +int Video_out::WaitForStop() +{ + this->Stop(); + while(1) + { + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); + + if(s) return 1; + usleep(10000); + } +} + +void 
Video_out::SetOutputSize(int width, int height) +{ + pthread_mutex_lock(&this->lock); + this->outputWidth = width; + this->outputHeight = height; + pthread_mutex_unlock(&this->lock); +} + +void Video_out::SetOutputPxFmt(const char *fmt) +{ + pthread_mutex_lock(&this->lock); + this->outputPxFmt = fmt; + pthread_mutex_unlock(&this->lock); +} + +void *Video_out_manager_Worker_thread(void *arg) +{ + class Video_out *argobj = (class Video_out*) arg; + argobj->Run(); + + return NULL; +} + +// ***************************************************************** + +std::vector List_out_devices() +{ + std::vector out; + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + return out; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + out.push_back(tmp); + } + closedir(dp); + return out; +} + + + diff --git a/v4l2out.h b/v4l2out.h new file mode 100644 index 0000000..b504132 --- /dev/null +++ b/v4l2out.h @@ -0,0 +1,53 @@ +#ifndef __V4L2OUT_H__ +#define __V4L2OUT_H__ + +#include +#include +#include +#include "base.h" + +class Video_out : public Base_Video_Out +{ +public: + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + int verbose; + std::vector sendFrameArgs; + std::vector sendFrameBuffer; + unsigned long lastFrameTime_sec, lastFrameTime_usec; + int fdwr; + int framesize; + unsigned char *currentFrame; + int outputWidth; + int outputHeight; + std::string outputPxFmt; + + Video_out(const char *devNameIn); + virtual ~Video_out(); + +protected: + void SendFrameInternal(); + +public: + void Run(); + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); + void Stop(); + int WaitForStop(); + + void SetOutputSize(int width, int height); + void 
SetOutputPxFmt(const char *fmt); +}; + +void *Video_out_manager_Worker_thread(void *arg); + +std::vector List_out_devices(); + +// ****************************************************************** + +#endif //__V4L2OUT_H__ + + diff --git a/videoin.cpp b/videoin.cpp new file mode 100644 index 0000000..c699191 --- /dev/null +++ b/videoin.cpp @@ -0,0 +1,279 @@ + +#include +#include "videoin.h" +#include +#ifdef _NT +#include "mfvideoin.h" +#endif +#ifdef _POSIX +#include "v4l2capture.h" +#endif + +void Device_manager_dealloc(Device_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threadArgStore->begin(); + it != self->threadArgStore->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_stop(self, args); + Py_DECREF(args); + } + + delete self->threadArgStore; + self->ob_type->tp_free((PyObject *)self); +} + +int Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threadArgStore = new std::map; + return 0; +} + +PyObject *Device_manager_open(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device has not already been opened + std::map::iterator it = self->threadArgStore->find(devarg); + if(it!=self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already opened."); + return NULL; + } + + pthread_t thread; + #ifdef _POSIX + Video_in_Manager *threadArgs = new Video_in_Manager(devarg); + #endif + #ifdef _NT + wchar_t *tmpDevName = new wchar_t[strlen(devarg)+1]; + size_t returnValue; + + mbstowcs_s(&returnValue, tmpDevName, strlen(devarg)+1, devarg, strlen(devarg)+1); + std::wstring tmpDevName2(tmpDevName); + delete [] tmpDevName; + + MfVideoIn *threadArgs = new MfVideoIn(tmpDevName2.c_str()); + #endif + 
+ (*self->threadArgStore)[devarg] = threadArgs; + + #ifdef _POSIX + pthread_create(&thread, NULL, Video_in_Worker_thread, threadArgs); + #endif + #ifdef _NT + pthread_create(&thread, NULL, MfVideoIn_Worker_thread, threadArgs); + #endif + + threadArgs->OpenDevice(); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) +{ + int size_x; + int size_y; + const char *fmt = NULL; + const char *devarg = NULL; + + if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) + { + PyErr_BadArgument(); + return NULL; + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); + return NULL; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->SetFormat(fmt, size_x, size_y); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + long buffer_count = 10; + if(PyTuple_Size(args) >= 4) + { + PyObject *pybufferarg = PyTuple_GetItem(args, 4); + buffer_count = PyInt_AsLong(pybufferarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); + return NULL; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StartDevice(buffer_count); + + Py_RETURN_NONE; +} + +void PyDict_SetItemString_Decref(PyObject *dic, const char *key, PyObject *val) +{ + PyDict_SetItemString(dic, key, val); + Py_DECREF(val); +} + +PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) +{ + //std::cout << "Device_manager_Get_frame" << 
std::endl; + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); + return NULL; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + unsigned char *buffOut = NULL; + class FrameMetaData metaOut; + int ok = 0; + try + { + ok = threadArgs->GetFrame(&buffOut, &metaOut); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + if(ok && buffOut != NULL) + { + //Format output to python + PyObject *pymeta = PyDict_New(); + PyDict_SetItemString_Decref(pymeta, "width", PyInt_FromLong(metaOut.width)); + PyDict_SetItemString_Decref(pymeta, "height", PyInt_FromLong(metaOut.height)); + PyDict_SetItemString_Decref(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); + PyDict_SetItemString_Decref(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); + PyDict_SetItemString_Decref(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); + PyDict_SetItemString_Decref(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); + + PyObject *out = PyTuple_New(2); + PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); + PyTuple_SetItem(out, 1, pymeta); + + delete [] buffOut; + return out; + } + + if(!ok && buffOut!= NULL) //This generally should not happen + delete [] buffOut; + + Py_RETURN_NONE; +} + +PyObject *Device_manager_stop(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = 
self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); + return NULL; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StopDevice(); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_close(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); + return NULL; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->CloseDevice(); + + //Stop worker thread + threadArgs->Stop(); + + //Release memeory + threadArgs->WaitForStop(); + delete threadArgs; + self->threadArgStore->erase(devarg); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_list_devices(Device_manager *self) +{ + PyObject *out = PyList_New(0); + std::vector > devLi = List_in_devices(); + + for(unsigned i=0; i +#include "base.h" +#include +#include + +/*typedef struct { + PyObject_HEAD + int fd; + struct buffer *buffers; + int buffer_count; +} Video_device;*/ + +class Device_manager_cl{ +public: + PyObject_HEAD + std::map *threadArgStore; +}; +typedef Device_manager_cl Device_manager; + +int Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs); +void Device_manager_dealloc(Device_manager *self); +PyObject *Device_manager_open(Device_manager *self, PyObject *args); +PyObject *Device_manager_set_format(Device_manager *self, PyObject *args); +PyObject *Device_manager_Start(Device_manager *self, PyObject *args); +PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args); +PyObject *Device_manager_stop(Device_manager *self, PyObject *args); 
+PyObject *Device_manager_close(Device_manager *self, PyObject *args); +PyObject *Device_manager_list_devices(Device_manager *self); + + + +#endif //VIDEOIN_H + diff --git a/videoin.py b/videoin.py new file mode 100644 index 0000000..6cf8a59 --- /dev/null +++ b/videoin.py @@ -0,0 +1,37 @@ + +import videolive, time + +if __name__=="__main__": + inManager = videolive.Video_in_stream_manager() + print inManager + + devs = inManager.list_devices() + print devs + + if len(devs) == 0: + print "No source devices detected" + exit(0) + + inManager.open(devs[0][0]) + + time.sleep(1) + inManager.start(devs[0][0]) + count = 0 + + while 1: + time.sleep(0.01) + frame = inManager.get_frame(devs[0][0]) + if frame is None: continue + print len(frame[0]), frame[1] + count += 1 + + inManager.stop(devs[0][0]) + + time.sleep(1) + + inManager.close(devs[0][0]) + + time.sleep(1) + + del inManager + diff --git a/videoout.cpp b/videoout.cpp new file mode 100644 index 0000000..85722d4 --- /dev/null +++ b/videoout.cpp @@ -0,0 +1,167 @@ + +#include +#include +#include +#include "videoout.h" +#ifdef _NT +#include "namedpipeout.h" +#endif +#if _POSIX +#include "v4l2out.h" +#endif + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threads = new std::map; + return 0; +} + +void Video_out_manager_dealloc(Video_out_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threads->begin(); + it != self->threads->end(); it++) + { + it->second->Stop(); + it->second->WaitForStop(); + } + + delete self->threads; + self->threads = NULL; + self->ob_type->tp_free((PyObject *)self); +} + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) +{ + std::cout << "Video_out_manager_open" << std::endl; + + //Process arguments + const char *devarg = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) + { + 
PyErr_SetString(PyExc_RuntimeError, "Incorrect arguments to function."); + return NULL; + } + + //Create worker thread + pthread_t thread; + #ifdef _POSIX + Video_out *threadArgs = new Video_out(devarg); + #endif + #ifdef _NT + NamedPipeOut *threadArgs = new NamedPipeOut(devarg); + #endif + + (*self->threads)[devarg] = threadArgs; + threadArgs->SetOutputSize(widthIn, heightIn); + threadArgs->SetOutputPxFmt(pxFmtIn); + + #ifdef _POSIX + pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); + #endif + #ifdef _NT + pthread_create(&thread, NULL, NamedPipeOut_Worker_thread, threadArgs); + #endif + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) +{ + //printf("Video_out_manager_Send_frame\n"); + //dev = '\\dev\\video0', img, pixel_format, width, height + + //Process arguments + const char *devarg = NULL; + const char *imgIn = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(PyObject_Length(args) < 5) + { + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); + return NULL; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyimg = PyTuple_GetItem(args, 1); + imgIn = NULL; + if(imgIn==NULL && PyString_Check(pyimg)) imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyByteArray_Check(pyimg)) imgIn = PyByteArray_AsString(pyimg); + Py_ssize_t imgLen = PyObject_Length(pyimg); + + if(imgIn == NULL) + {PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or bytearray."); return NULL;} + + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); + pxFmtIn = PyString_AsString(pyPxFmt); + + PyObject *pyWidth = PyTuple_GetItem(args, 3); + widthIn = PyInt_AsLong(pyWidth); + + PyObject *pyHeight = PyTuple_GetItem(args, 4); + heightIn = PyInt_AsLong(pyHeight); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + try + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, 
widthIn, heightIn); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + } + else + { + PyErr_SetString(PyExc_RuntimeError, "Device not found."); + return NULL; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Stop worker thread + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->Stop(); + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_list_devices(Video_out_manager *self) +{ + PyObject *out = PyList_New(0); + std::vector devLi = List_out_devices(); + for(unsigned i=0; i +#include +#include +#include "base.h" + +class Video_out_manager_cl{ +public: + PyObject_HEAD + std::map *threads; +}; +typedef Video_out_manager_cl Video_out_manager; + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs); + +void Video_out_manager_dealloc(Video_out_manager *self); + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_list_devices(Video_out_manager *self); + +#endif //VIDEOOUT_H + diff --git a/videoout.py b/videoout.py new file mode 100644 index 0000000..9807f8d --- /dev/null +++ b/videoout.py @@ -0,0 +1,31 @@ + +import videolive, time +import numpy as np + +if __name__=="__main__": + outManager = videolive.Video_out_manager() + print outManager + + devs = outManager.list_devices() + print devs + + if len(devs) == 0: + print "No source devices detected" + exit(0) + + outManager.open(devs[0], "RGB24", 640, 480) + + imgLen = 640 * 480 * 3 + img = np.zeros(shape=(imgLen,), 
dtype=np.uint8) + + for i in range(imgLen): + if (i % 500) > 250: + img[i] = np.random.randint(0, 255) + else: + img[i] = 128 + + while(1): + outManager.send_frame(devs[0], str(img.tostring()), "RGB24", 640, 480) + + time.sleep(0.1) + diff --git a/videooutfile.cpp b/videooutfile.cpp new file mode 100644 index 0000000..e5d1437 --- /dev/null +++ b/videooutfile.cpp @@ -0,0 +1,289 @@ + +#include +#include +#include +#include "videooutfile.h" +#ifdef _NT +#include "mfvideooutfile.h" +#endif +#if _POSIX +//TODO +#endif + +int Video_out_file_manager_init(Video_out_file_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threads = new std::map; + return 0; +} + +void Video_out_file_manager_dealloc(Video_out_file_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threads->begin(); + it != self->threads->end(); it++) + { + it->second->Stop(); + it->second->WaitForStop(); + } + + delete self->threads; + self->threads = NULL; + self->ob_type->tp_free((PyObject *)self); +} + +PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args) +{ + //std::cout << "Video_out_file_manager_open" << std::endl; + + //Process arguments + const char *devarg = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "sii", &devarg, &widthIn, &heightIn)) + { + PyErr_SetString(PyExc_RuntimeError, "Incorrect arguments to function."); + return NULL; + } + + //Create worker thread + pthread_t thread; + #ifdef _POSIX + //TODO + PyErr_SetString(PyExc_RuntimeError, "Not implemented"); + return NULL; + #endif + #ifdef _NT + MfVideoOutFile *threadArgs = NULL; + try + { + threadArgs = new MfVideoOutFile(devarg); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + #endif + + #ifdef _NT //TODO Remove ifdef when POSIX approah is established + (*self->threads)[devarg] = threadArgs; + try + { + threadArgs->SetOutputSize(widthIn, heightIn); + } + catch(std::exception &err) 
+ { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + #endif + + #ifdef _POSIX + //TODO + #endif + #ifdef _NT + pthread_create(&thread, NULL, MfVideoOut_File_Worker_thread, threadArgs); + #endif + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) +{ + //std::cout << "Video_out_file_manager_Send_frame" << std::endl; + //dev = '\\dev\\video0', img, pixel_format, width, height, time_sec, time_usec + + //Process arguments + const char *devarg = NULL; + const char *imgIn = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + double time_sec = 0; + + if(PyObject_Length(args) < 5) + { + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); + return NULL; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyimg = PyTuple_GetItem(args, 1); + if(imgIn==NULL && PyString_Check(pyimg)) imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyByteArray_Check(pyimg)) imgIn = PyByteArray_AsString(pyimg); + + Py_ssize_t imgLen = PyObject_Length(pyimg); + + if(imgIn == NULL) + {PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or byte array.");return NULL;} + + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); + pxFmtIn = PyString_AsString(pyPxFmt); + + PyObject *pyWidth = PyTuple_GetItem(args, 3); + widthIn = PyInt_AsLong(pyWidth); + + PyObject *pyHeight = PyTuple_GetItem(args, 4); + heightIn = PyInt_AsLong(pyHeight); + + if(PyObject_Length(args) > 5) + { + PyObject *pyTimeSec = PyTuple_GetItem(args, 5); + if(pyTimeSec == Py_None) time_sec = 0; + if(PyInt_Check(pyTimeSec)) time_sec = PyInt_AsLong(pyTimeSec); + if(PyFloat_Check(pyTimeSec)) time_sec = PyFloat_AsDouble(pyTimeSec); + } + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + try + { + unsigned int timeSec = (unsigned int)(time_sec); + double time_usec = (time_sec - timeSec) * 1e6; + + 
it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); + } + catch (std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + } + else + { + PyErr_SetString(PyExc_RuntimeError, "Device not found."); + return NULL; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Stop worker thread + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + try + { + it->second->Stop(); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + int frameRate = 0; + + if(PyObject_Length(args) < 2) + { + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); + return NULL; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyFrameRate = PyTuple_GetItem(args, 1); + frameRate = PyInt_AsLong(pyFrameRate); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + try + { + it->second->SetFrameRate(frameRate); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + } + else + { + PyErr_SetString(PyExc_RuntimeError, "Device not found."); + return NULL; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + char *videoCodec = NULL; + int bitRate = 0; + + if(PyObject_Length(args) < 2) + { + 
PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); + return NULL; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyVideoCodec = PyTuple_GetItem(args, 1); + if(pyVideoCodec != Py_None) + videoCodec = PyString_AsString(pyVideoCodec); + else + videoCodec = NULL; + + if(PyObject_Length(args) >= 3) + { + PyObject *pyBitRate = PyTuple_GetItem(args, 2); + bitRate = PyInt_AsLong(pyBitRate); + } + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + try + { + it->second->SetVideoCodec(videoCodec, bitRate); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + } + else + { + PyErr_SetString(PyExc_RuntimeError, "Device not found."); + return NULL; + } + + Py_RETURN_NONE; +} diff --git a/videooutfile.h b/videooutfile.h new file mode 100644 index 0000000..bb33aec --- /dev/null +++ b/videooutfile.h @@ -0,0 +1,29 @@ + +#ifndef VIDEOOUTFILE_H +#define VIDEOOUTFILE_H + +#include +#include +#include +#include "base.h" + +class Video_out_file_manager_cl{ +public: + PyObject_HEAD + std::map *threads; +}; +typedef Video_out_file_manager_cl Video_out_file_manager; + +int Video_out_file_manager_init(Video_out_file_manager *self, PyObject *args, + PyObject *kwargs); + +void Video_out_file_manager_dealloc(Video_out_file_manager *self); + +PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, PyObject *args); + +#endif //VIDEOOUTFILE_H + diff --git a/videooutfile.py b/videooutfile.py new file mode 100644 index 0000000..d0bd584 --- /dev/null +++ 
b/videooutfile.py @@ -0,0 +1,44 @@ + +import videolive, time, random +import numpy as np +import scipy.misc as misc + +if __name__=="__main__": + outManager = videolive.Video_out_file_manager() + print outManager + + lena = misc.imread("Lenna.png") + print lena.shape + w = lena.shape[1] + h = lena.shape[0] + fina = "test.wmv" + + realTimeFrames = 0 + + outManager.open(fina, 640, 480) + print "set_video_codec" + outManager.set_video_codec(fina, "H264", 800000) + print "set_frame_rate" + outManager.set_frame_rate(fina, 25) + + imgLen = w * h * 3 + #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 + #for i in range(imgLen): + # if (i % 3) == 0: + # img[i] = 0xff + # if (i % 3) == 1: + # img[i] = random.randint(0,255) + + for frNum in range(200): + #img = np.random.randint(0, 255, size=(imgLen,)) + #for i in range(imgLen): + # if (i % 500) <= 250: + # img[i] = 128 + + print "Frame", frNum + outManager.send_frame(fina, str(lena.tostring()), "RGB24", w, h) + + if realTimeFrames: + time.sleep(frNum / 500.) + else: + time.sleep(0.01) diff --git a/winsource/resource.h b/winsource/resource.h new file mode 100644 index 0000000..952244e --- /dev/null +++ b/winsource/resource.h @@ -0,0 +1,17 @@ +//{{NO_DEPENDENCIES}} +// Microsoft Developer Studio generated include file. 
+// Used by ball.rc +// +#define IDS_TITLE 100 + +// Next default values for new objects +// +#ifdef APSTUDIO_INVOKED +#ifndef APSTUDIO_READONLY_SYMBOLS +#define _APS_NO_MFC 1 +#define _APS_NEXT_RESOURCE_VALUE 101 +#define _APS_NEXT_COMMAND_VALUE 40001 +#define _APS_NEXT_CONTROL_VALUE 1000 +#define _APS_NEXT_SYMED_VALUE 101 +#endif +#endif diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp new file mode 100644 index 0000000..c55386f --- /dev/null +++ b/winsource/winsource.cpp @@ -0,0 +1,870 @@ + +#include +#include +#include +#include +#include "winsource.h" + +#pragma warning(disable:4710) // 'function': function not inlined (optimzation) + +// Setup data + +#define CreateComObject(clsid, iid, var) CoCreateInstance( clsid, NULL, CLSCTX_INPROC_SERVER, iid, (void **)&var); +STDAPI AMovieSetupRegisterServer( CLSID clsServer, LPCWSTR szDescription, LPCWSTR szFileName, LPCWSTR szThreadingModel = L"Both", LPCWSTR szServerType = L"InprocServer32" ); +STDAPI AMovieSetupUnregisterServer( CLSID clsServer ); + +const AMOVIESETUP_MEDIATYPE sudOpPinTypes = +{ + &MEDIATYPE_Video, // Major type + &MEDIASUBTYPE_NULL // Minor type +}; + +const AMOVIESETUP_PIN sudOpPin = +{ + L"Output", // Pin string name + FALSE, // Is it rendered + TRUE, // Is it an output + FALSE, // Can we have none + FALSE, // Can we have many + &CLSID_NULL, // Connects to filter + NULL, // Connects to pin + 1, // Number of types + &sudOpPinTypes }; // Pin details + +const AMOVIESETUP_FILTER sudBallax = +{ + &CLSID_Kinatomic_Camera, // Filter CLSID + L"Kinatomic Virtual Camera", // String name + MERIT_DO_NOT_USE, // Filter merit + 1, // Number pins + &sudOpPin // Pin details +}; + + +// COM global table of objects in this dll + +CFactoryTemplate g_Templates[] = { + { L"Kinatomic Virtual Camera" + , &CLSID_Kinatomic_Camera + , CCameraOutput::CreateInstance + , NULL + , &sudBallax } +}; +int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]); + + 
+//////////////////////////////////////////////////////////////////////// +// +// Exported entry points for registration and unregistration +// (in this case they only call through to default implementations). +// +//////////////////////////////////////////////////////////////////////// + +STDAPI RegisterFilters( BOOL bRegister ) +{ + HRESULT hr = NOERROR; + WCHAR achFileName[MAX_PATH]; + char achTemp[MAX_PATH]; + ASSERT(g_hInst != 0); + + if( 0 == GetModuleFileNameA(g_hInst, achTemp, sizeof(achTemp))) + return AmHresultFromWin32(GetLastError()); + + MultiByteToWideChar(CP_ACP, 0L, achTemp, lstrlenA(achTemp) + 1, + achFileName, NUMELMS(achFileName)); + + hr = CoInitialize(0); + if(bRegister) + { + hr = AMovieSetupRegisterServer(CLSID_Kinatomic_Camera, L"Kinatomic Virtual Camera", achFileName, L"Both", L"InprocServer32"); + } + + if( SUCCEEDED(hr) ) + { + IFilterMapper2 *fm = 0; + hr = CreateComObject( CLSID_FilterMapper2, IID_IFilterMapper2, fm ); + if( SUCCEEDED(hr) ) + { + if(bRegister) + { + IMoniker *pMoniker = 0; + REGFILTER2 rf2; + rf2.dwVersion = 1; + rf2.dwMerit = MERIT_DO_NOT_USE; + rf2.cPins = 1; + rf2.rgPins = &sudOpPin; + hr = fm->RegisterFilter(CLSID_Kinatomic_Camera, L"Kinatomic Virtual Camera", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); + } + else + { + hr = fm->UnregisterFilter(&CLSID_VideoInputDeviceCategory, 0, CLSID_Kinatomic_Camera); + } + } + + // release interface + // + if(fm) + fm->Release(); + } + + if( SUCCEEDED(hr) && !bRegister ) + hr = AMovieSetupUnregisterServer( CLSID_Kinatomic_Camera ); + + CoFreeUnusedLibraries(); + CoUninitialize(); + return hr; +} + +// +// DllRegisterServer +// +// Exported entry points for registration and unregistration +// +STDAPI DllRegisterServer() +{ + return RegisterFilters(TRUE); + +} // DllRegisterServer + + +// +// DllUnregisterServer +// +STDAPI DllUnregisterServer() +{ + return RegisterFilters(FALSE); + +} // DllUnregisterServer + + +// +// DllEntryPoint +// +extern "C" BOOL WINAPI 
DllEntryPoint(HINSTANCE, ULONG, LPVOID); + +BOOL APIENTRY DllMain(HANDLE hModule, + DWORD dwReason, + LPVOID lpReserved) +{ + return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved); +} + +// +// CreateInstance +// +// The only allowed way to create instances of stream! +// +CUnknown * WINAPI CCameraOutput::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) +{ + ASSERT(phr); + CUnknown *punk = new CCameraOutput(lpunk, phr); + return punk; + +} // CreateInstance + +HRESULT CCameraOutput::QueryInterface(REFIID riid, void **ppv) +{ + //Forward request for IAMStreamConfig & IKsPropertySet to the pin + if(riid == _uuidof(IAMStreamConfig) || riid == _uuidof(IKsPropertySet)) + return m_paStreams[0]->QueryInterface(riid, ppv); + else + return CSource::QueryInterface(riid, ppv); +} + +// +// Constructor +// +// Initialise a CCameraStream object so that we have a pin. +// +CCameraOutput::CCameraOutput(LPUNKNOWN lpunk, HRESULT *phr) : + CSource(NAME("Kinatomic Virtual Camera"), lpunk, CLSID_Kinatomic_Camera) +{ + + ASSERT(phr); + CAutoLock cAutoLock(&m_cStateLock); + // Create the one and only output pin + m_paStreams = (CSourceStream **) new CCameraStream*[1]; + m_paStreams[0] = new CCameraStream(phr, this, L"Kinatomic Virtual Camera"); + +} // (Constructor) + +// +// Constructor +// +CCameraStream::CCameraStream(HRESULT *phr, + CCameraOutput *pParent, + LPCWSTR pPinName) : + CSourceStream(NAME("Kinatomic Virtual Camera"),phr, pParent, pPinName), + m_pParent(pParent) +{ + GetMediaType(4, &m_mt); + + memset(&this->rxo, 0x00, sizeof(OVERLAPPED)); + memset(&this->txo, 0x00, sizeof(OVERLAPPED)); + this->pipeHandle = INVALID_HANDLE_VALUE; + + this->currentFrame = NULL; + this->currentFrameLen = 0; + this->testCursor = 0; + this->tmpBuff = NULL; + + this->rxBuff = NULL; + this->rxBuffLen = 0; + this->rxBuffAlloc = 0; + this->fillBufferCount = 0; + + SYSTEMTIME systime; + GetSystemTime(&systime); + SystemTimeToFileTime(&systime, &this->lastRxUpdateTime); + 
SystemTimeToFileTime(&systime, &this->lastTxUpdateTime); + +} // (Constructor) + +// +// Destructor +// +CCameraStream::~CCameraStream() +{ + if(this->pipeHandle != 0) + CloseHandle(this->pipeHandle); + if(this->currentFrame != NULL) + delete [] this->currentFrame; + this->currentFrame = NULL; + this->currentFrameLen = 0; + + this->pipeHandle = 0; + + if(this->rxBuff!=NULL) + delete [] this->rxBuff; + this->rxBuffLen = 0; + this->rxBuffAlloc = 0; + if(this->tmpBuff!=NULL) + delete [] tmpBuff; + +} // (Destructor) + +HRESULT CCameraStream::QueryInterface(REFIID riid, void **ppv) +{ + // Standard OLE stuff + if(riid == _uuidof(IAMStreamConfig)) + *ppv = (IAMStreamConfig*)this; + else if(riid == _uuidof(IKsPropertySet)) + *ppv = (IKsPropertySet*)this; + else + return CSourceStream::QueryInterface(riid, ppv); + + AddRef(); + return S_OK; +} + +int CCameraStream::EstablishPipeConnection() +{ + if(this->pipeHandle == INVALID_HANDLE_VALUE) + { + LPCTSTR n = L"\\\\.\\pipe\\testpipe"; + + this->pipeHandle = CreateFile(n, + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE, + NULL, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED, + NULL); + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + memset(&this->rxo, 0x00, sizeof(OVERLAPPED)); + memset(&this->txo, 0x00, sizeof(OVERLAPPED)); + } + } + + return this->pipeHandle != INVALID_HANDLE_VALUE; +} + +int CCameraStream::ReceiveDataViaNamedPipe() +{ + this->EstablishPipeConnection(); + + int frameChanged = 0; + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + + //Receive messages from named pipe + const int tmpBuffLen = 1024*1024; + if(tmpBuff==NULL) + tmpBuff = new char[tmpBuffLen]; + DWORD bytesRead = 0; + BOOL res = 0; + + if(HasOverlappedIoCompleted(&this->rxo)) + { + res = ReadFileEx(this->pipeHandle, + tmpBuff, + tmpBuffLen, + &rxo, + NULL); + } + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + 
return 0; + } + + res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return 0; + } + + if(res && bytesRead > 0) + { + //Merge receive string with buffer + if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) + { + //No need to reallocate + memcpy(&rxBuff[rxBuffLen], tmpBuff, bytesRead); + rxBuffLen += bytesRead; + } + else + { + //Buffer must be resized + if(rxBuff != NULL) + { + char *tmp = new (std::nothrow) char[rxBuffLen + bytesRead]; + if(tmp!=NULL) + { + memcpy(tmp, rxBuff, rxBuffLen); + memcpy(&tmp[rxBuffLen], tmpBuff, bytesRead); + delete [] rxBuff; + + rxBuff = tmp; + rxBuffLen = rxBuffLen + bytesRead; + rxBuffAlloc = rxBuffLen + bytesRead; + } + else + { + return -1; + } + } + else + { + rxBuff = new (std::nothrow) char[bytesRead]; + if(rxBuff == NULL) + { + return - 1; + } + memcpy(rxBuff, tmpBuff, bytesRead); + + rxBuffLen = bytesRead; + rxBuffAlloc = bytesRead; + } + } + + //Split receive buffer into separate messages + UINT32 cursor = 0; + int processing = 1; + while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) + { + UINT32 *wordArray = (UINT32 *)&rxBuff[cursor]; + UINT32 msgType = wordArray[0]; + UINT32 msgLen = wordArray[1]; + if(rxBuffLen-cursor >= 8+msgLen) + { + char *payload = &this->rxBuff[cursor+8]; + UINT32 payloadLen = msgLen - 8; + UINT32 *payloadArray = (UINT32 *)payload; + + if(msgType == 2) + { + //Message is new frame + if(this->currentFrame!=NULL) + for(unsigned i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = payload[i]; + } + + frameChanged = 1; + + } + + cursor += 8+msgLen; + } + else + { + processing = 0; + } + } + + //Store unprocessed data in buffer + if(cursor > 0 && rxBuff != NULL) + { + memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); + rxBuffLen = rxBuffLen - cursor; + } + //rxBuffLen = 0; + + } + } + + return frameChanged; + +} + + 
+void CCameraStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen) +{ + this->EstablishPipeConnection(); + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + /*for(DWORD i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = 0x255; + }*/ + + //Transmit test message using named pipe + DWORD bytesWritten = 0; + const int buffLen = 4*5; + char test[buffLen]; + UINT32 *pMsgType = (UINT32 *)&test[0]; + *pMsgType = 1; + UINT32 *pMsgLen = (UINT32 *)&test[4]; + *pMsgLen = 4*3; + UINT32 *pWidth = (UINT32 *)&test[8]; + *pWidth = width; + UINT32 *pHeight = (UINT32 *)&test[12]; + *pHeight = height; + UINT32 *pBuffLen = (UINT32 *)&test[16]; + *pBuffLen = bufflen; + + if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } + } + + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } + } +} + +void CCameraStream::SendErrorViaNamedPipe(UINT32 errCode) +{ + this->EstablishPipeConnection(); + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + + //Transmit test message using named pipe + DWORD bytesWritten = 0; + const int buffLen = 4*3; + char test[buffLen]; + UINT32 *pMsgType = (UINT32 *)&test[0]; + *pMsgType = 1; + UINT32 *pMsgLen = (UINT32 *)&test[4]; + *pMsgLen = 4; + UINT32 *pError = (UINT32 *)&test[8]; + *pError = errCode; + + if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } + } + + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, 
TRUE); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } + } + +} + + +// +// FillBuffer +// +HRESULT CCameraStream::FillBuffer(IMediaSample *pms) +{ + this->fillBufferCount ++; + + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(m_mt.Format()); + LONG width = pvi->bmiHeader.biWidth; + LONG height = pvi->bmiHeader.biHeight; + + BYTE *pData; + long lDataLen; + pms->GetPointer(&pData); + lDataLen = pms->GetSize(); + + //Calculate time since last frame update + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME fiTime; + SystemTimeToFileTime(&systime, &fiTime); + LARGE_INTEGER fiTimeNum; + fiTimeNum.HighPart = fiTime.dwHighDateTime; + fiTimeNum.LowPart = fiTime.dwLowDateTime; + + LARGE_INTEGER lastRxUpdate; + lastRxUpdate.HighPart = this->lastRxUpdateTime.dwHighDateTime; + lastRxUpdate.LowPart = this->lastRxUpdateTime.dwLowDateTime; + LARGE_INTEGER lastTxUpdate; + lastTxUpdate.HighPart = this->lastTxUpdateTime.dwHighDateTime; + lastTxUpdate.LowPart = this->lastTxUpdateTime.dwLowDateTime; + + LARGE_INTEGER elapseRx; + elapseRx.QuadPart = fiTimeNum.QuadPart - lastRxUpdate.QuadPart; + float elapseRxMs = elapseRx.LowPart / 10000.f; + + LARGE_INTEGER elapseTx; + elapseTx.QuadPart = fiTimeNum.QuadPart - lastTxUpdate.QuadPart; + float elapseTxMs = elapseTx.LowPart / 10000.f; + + int frameChanged = 0; + + //Initialise test frame + if(this->currentFrame == NULL) + { + this->currentFrame = new BYTE[lDataLen]; + this->currentFrameLen = lDataLen; + + long cursor = 0; + for(LONG y=0; y < height; y++) + for(LONG x=0; x < width; x++) + { + if(cursor >= this->currentFrameLen) continue; + + this->currentFrame[cursor] = x % 255; //Blue + this->currentFrame[cursor+1] = y % 255; //Green + this->currentFrame[cursor+2] = rand(); //Red + + cursor += 3; + } + frameChanged = 1; + } + + if(elapseRxMs > 10.) 
+ { + int ret = this->ReceiveDataViaNamedPipe(); + if(ret) frameChanged = ret; + + this->lastRxUpdateTime=fiTime; + } + + if(this->currentFrame != NULL) + { + /* REFERENCE_TIME rtNow; + REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + + //This is the slow code!? + rtNow = m_rtLastTime; + m_rtLastTime += avgFrameTime; + pms->SetTime(&rtNow, &m_rtLastTime); + pms->SetSyncPoint(TRUE); + //End of slow?*/ + unsigned bytesToCopy = lDataLen; + if(this->currentFrameLen < bytesToCopy) + bytesToCopy = this->currentFrameLen; + + memcpy(pData, this->currentFrame, bytesToCopy); + } + + if(elapseTxMs > 40.) + { + this->SendStatusViaNamedPipe(width, height, lDataLen); + this->lastTxUpdateTime=fiTime; + } + + /*for(LONG i=0;iFormat()); + HRESULT hr = CSourceStream::SetMediaType(pMediaType); + return hr; +} // SetMediaType + +////////////////////////////////////////////////////////////////////////// +// This is called when the output format has been negotiated +////////////////////////////////////////////////////////////////////////// + +// See Directshow help topic for IAMStreamConfig for details on this method +HRESULT CCameraStream::GetMediaType(int iPosition, CMediaType *pmt) +{ + if(iPosition < 0) return E_INVALIDARG; + if(iPosition > 8) return VFW_S_NO_MORE_ITEMS; + + if(iPosition == 0) + { + *pmt = m_mt; + return S_OK; + } + + DECLARE_PTR(VIDEOINFOHEADER, pvi, pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER))); + ZeroMemory(pvi, sizeof(VIDEOINFOHEADER)); + + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 24; + pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + pvi->bmiHeader.biWidth = 160 * iPosition; + pvi->bmiHeader.biHeight = 120 * iPosition; + pvi->bmiHeader.biPlanes = 1; + pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); + pvi->bmiHeader.biClrImportant = 0; + + pvi->AvgTimePerFrame = 1000000; + + SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. 
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle + + pmt->SetType(&MEDIATYPE_Video); + pmt->SetFormatType(&FORMAT_VideoInfo); + pmt->SetTemporalCompression(FALSE); + + // Work out the GUID for the subtype from the header info. + const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader); + pmt->SetSubtype(&SubTypeGUID); + pmt->SetSampleSize(pvi->bmiHeader.biSizeImage); + + return NOERROR; + +} // GetMediaType + +// This method is called to see if a given output format is supported +HRESULT CCameraStream::CheckMediaType(const CMediaType *pMediaType) +{ + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(pMediaType->Format()); + if(*pMediaType != m_mt) + return E_INVALIDARG; + return S_OK; +} // CheckMediaType + +// This method is called after the pins are connected to allocate buffers to stream data +HRESULT CCameraStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) +{ + CAutoLock cAutoLock(m_pFilter->pStateLock()); + HRESULT hr = NOERROR; + + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) m_mt.Format(); + pProperties->cBuffers = 1; + pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; + + ALLOCATOR_PROPERTIES Actual; + hr = pAlloc->SetProperties(pProperties,&Actual); + + if(FAILED(hr)) return hr; + if(Actual.cbBuffer < pProperties->cbBuffer) return E_FAIL; + + return NOERROR; +} // DecideBufferSize + +// Called when graph is run +HRESULT CCameraStream::OnThreadCreate() +{ + //m_iRepeatTime = m_iDefaultRepeatTime; + m_rtLastTime = 0; + return NOERROR; +} // OnThreadCreate + + +////////////////////////////////////////////////////////////////////////// +// IAMStreamConfig +////////////////////////////////////////////////////////////////////////// + +HRESULT STDMETHODCALLTYPE CCameraStream::SetFormat(AM_MEDIA_TYPE *pmt) +{ + DECLARE_PTR(VIDEOINFOHEADER, pvi, m_mt.pbFormat); + m_mt = *pmt; + IPin* pin; + ConnectedTo(&pin); + if(pin) + { + IFilterGraph *pGraph = m_pParent->GetGraph(); + pGraph->Reconnect(this); + } + return 
S_OK; +} + +HRESULT STDMETHODCALLTYPE CCameraStream::GetFormat(AM_MEDIA_TYPE **ppmt) +{ + *ppmt = CreateMediaType(&m_mt); + return S_OK; +} + +HRESULT STDMETHODCALLTYPE CCameraStream::GetNumberOfCapabilities(int *piCount, int *piSize) +{ + *piCount = 8; + *piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS); + return S_OK; +} + +HRESULT STDMETHODCALLTYPE CCameraStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC) +{ + *pmt = CreateMediaType(&m_mt); + DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat); + + if (iIndex == 0) iIndex = 4; + + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 24; + pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + pvi->bmiHeader.biWidth = 80 * iIndex; + pvi->bmiHeader.biHeight = 60 * iIndex; + pvi->bmiHeader.biPlanes = 1; + pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); + pvi->bmiHeader.biClrImportant = 0; + + SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. + SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle + + (*pmt)->majortype = MEDIATYPE_Video; + (*pmt)->subtype = MEDIASUBTYPE_RGB24; + (*pmt)->formattype = FORMAT_VideoInfo; + (*pmt)->bTemporalCompression = FALSE; + (*pmt)->bFixedSizeSamples= FALSE; + (*pmt)->lSampleSize = pvi->bmiHeader.biSizeImage; + (*pmt)->cbFormat = sizeof(VIDEOINFOHEADER); + + DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC); + + pvscc->guid = FORMAT_VideoInfo; + pvscc->VideoStandard = AnalogVideo_None; + pvscc->InputSize.cx = 640; + pvscc->InputSize.cy = 480; + pvscc->MinCroppingSize.cx = 80; + pvscc->MinCroppingSize.cy = 60; + pvscc->MaxCroppingSize.cx = 640; + pvscc->MaxCroppingSize.cy = 480; + pvscc->CropGranularityX = 80; + pvscc->CropGranularityY = 60; + pvscc->CropAlignX = 0; + pvscc->CropAlignY = 0; + + pvscc->MinOutputSize.cx = 80; + pvscc->MinOutputSize.cy = 60; + pvscc->MaxOutputSize.cx = 640; + pvscc->MaxOutputSize.cy = 480; + pvscc->OutputGranularityX = 0; + pvscc->OutputGranularityY = 0; + pvscc->StretchTapsX = 
0; + pvscc->StretchTapsY = 0; + pvscc->ShrinkTapsX = 0; + pvscc->ShrinkTapsY = 0; + pvscc->MinFrameInterval = 200000; //50 fps + pvscc->MaxFrameInterval = 50000000; // 0.2 fps + pvscc->MinBitsPerSecond = (80 * 60 * 3 * 8) / 5; + pvscc->MaxBitsPerSecond = 640 * 480 * 3 * 8 * 50; + + return S_OK; +} + +////////////////////////////////////////////////////////////////////////// +// IKsPropertySet +////////////////////////////////////////////////////////////////////////// + + +HRESULT CCameraStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, + DWORD cbInstanceData, void *pPropData, DWORD cbPropData) +{// Set: Cannot set any properties. + return E_NOTIMPL; +} + +// Get: Return the pin category (our only property). +HRESULT CCameraStream::Get( + REFGUID guidPropSet, // Which property set. + DWORD dwPropID, // Which property in that set. + void *pInstanceData, // Instance data (ignore). + DWORD cbInstanceData, // Size of the instance data (ignore). + void *pPropData, // Buffer to receive the property data. + DWORD cbPropData, // Size of the buffer. + DWORD *pcbReturned // Return the size of the property. +) +{ + if (guidPropSet == AMPROPSETID_Pin) + { + if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED; + if (pPropData == NULL && pcbReturned == NULL) return E_POINTER; + + if (pcbReturned) *pcbReturned = sizeof(GUID); + if (pPropData == NULL) return S_OK; // Caller just wants to know the size. + if (cbPropData < sizeof(GUID)) return E_UNEXPECTED;// The buffer is too small. + + *(GUID *)pPropData = PIN_CATEGORY_CAPTURE; + return S_OK; + } + + + + return E_PROP_SET_UNSUPPORTED; +} + +// QuerySupported: Query whether the pin supports the specified property. 
+HRESULT CCameraStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport) +{ + if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED; + if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED; + // We support getting this property, but not setting it. + if (pTypeSupport) *pTypeSupport = KSPROPERTY_SUPPORT_GET; + return S_OK; +} + +DWORD CCameraStream::ThreadProc() +{ + return CSourceStream::ThreadProc(); +} \ No newline at end of file diff --git a/winsource/winsource.def b/winsource/winsource.def new file mode 100644 index 0000000..8d09658 --- /dev/null +++ b/winsource/winsource.def @@ -0,0 +1,9 @@ + +LIBRARY winsource.dll + +EXPORTS + DllMain PRIVATE + DllGetClassObject PRIVATE + DllCanUnloadNow PRIVATE + DllRegisterServer PRIVATE + DllUnregisterServer PRIVATE diff --git a/winsource/winsource.h b/winsource/winsource.h new file mode 100644 index 0000000..832146b --- /dev/null +++ b/winsource/winsource.h @@ -0,0 +1,131 @@ +// {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB} +DEFINE_GUID(CLSID_Kinatomic_Camera, +0x3a24bd2f, 0xb9b1, 0x4b32, 0x9a, 0x1e, 0x17, 0x79, 0x16, 0x24, 0xb6, 0xab); + +#define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr); + +//------------------------------------------------------------------------------ +// Forward Declarations +//------------------------------------------------------------------------------ +// The class managing the output pin +class CCameraStream; + + +//------------------------------------------------------------------------------ +// Class CCameraOutput +// +// This is the main class for the camera output. It inherits from +// CSource, the DirectShow base class for source filters. +//------------------------------------------------------------------------------ +class CCameraOutput : public CSource +{ +public: + + // The only allowed way to create Bouncing balls! 
+ static CUnknown * WINAPI CreateInstance(LPUNKNOWN lpunk, HRESULT *phr); + + STDMETHODIMP QueryInterface(REFIID riid, void **ppv); + IFilterGraph *GetGraph() {return m_pGraph;} +private: + + // It is only allowed to to create these objects with CreateInstance + CCameraOutput(LPUNKNOWN lpunk, HRESULT *phr); + +}; // CBouncingBall + + +//------------------------------------------------------------------------------ +// Class CCameraStream +// +// This class implements the stream which is used to output the bouncing ball +// data from the source filter. It inherits from DirectShows's base +// CSourceStream class. +//------------------------------------------------------------------------------ +class CCameraStream : public CSourceStream, public IAMStreamConfig, public IKsPropertySet +{ + +public: + + CCameraStream(HRESULT *phr, CCameraOutput *pParent, LPCWSTR pPinName); + ~CCameraStream(); + + // Update image buffer + HRESULT FillBuffer(IMediaSample *pms); + + // Ask for buffers of the size appropriate to the agreed media type + HRESULT DecideBufferSize(IMemAllocator *pIMemAlloc, + ALLOCATOR_PROPERTIES *pProperties); + + // Set the agreed media type + HRESULT SetMediaType(const CMediaType *pMediaType); + + HRESULT CheckMediaType(const CMediaType *pMediaType); + HRESULT GetMediaType(int iPosition, CMediaType *pmt); + + // Resets the stream time to zero + HRESULT OnThreadCreate(void); + + // Quality control notifications sent to us + STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); + + DWORD ThreadProc(); + int EstablishPipeConnection(); + int ReceiveDataViaNamedPipe(); + void SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen); + void SendErrorViaNamedPipe(UINT32 errCode); + + STDMETHODIMP QueryInterface(REFIID riid, void **ppv); + STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); } \ + STDMETHODIMP_(ULONG) Release() { return GetOwner()->Release(); } + + HRESULT STDMETHODCALLTYPE SetFormat(AM_MEDIA_TYPE *pmt); + HRESULT 
STDMETHODCALLTYPE GetFormat(AM_MEDIA_TYPE **ppmt); + HRESULT STDMETHODCALLTYPE GetNumberOfCapabilities(int *piCount, int *piSize); + HRESULT STDMETHODCALLTYPE GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC); + + HRESULT STDMETHODCALLTYPE Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, DWORD cbInstanceData, void *pPropData, DWORD cbPropData); + HRESULT STDMETHODCALLTYPE Get(REFGUID guidPropSet, DWORD dwPropID, void *pInstanceData,DWORD cbInstanceData, void *pPropData, DWORD cbPropData, DWORD *pcbReturned); + HRESULT STDMETHODCALLTYPE QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport); + +private: + + //int m_iImageHeight; // The current image height + //int m_iImageWidth; // And current image width + //int m_iRepeatTime; // Time in msec between frames + //const int m_iDefaultRepeatTime; // Initial m_iRepeatTime + + //BYTE m_BallPixel[4]; // Represents one coloured ball + //int m_iPixelSize; // The pixel size in bytes + //PALETTEENTRY m_Palette[256]; // The optimal palette for the image + + CCritSec m_cSharedState; // Lock on m_rtSampleTime and m_Ball + CRefTime m_rtSampleTime; // The time stamp for each sample + //CBall *m_Ball; // The current ball object + CCameraOutput *m_pParent; + + char *rxBuff; + int rxBuffLen; + int rxBuffAlloc; + + REFERENCE_TIME m_rtLastTime; + + HANDLE pipeHandle; + OVERLAPPED rxo; + OVERLAPPED txo; + + BYTE *currentFrame; + LONG currentFrameLen; + int testCursor; + char *tmpBuff; + int fillBufferCount; + + FILETIME lastTxUpdateTime; + FILETIME lastRxUpdateTime; + + // set up the palette appropriately + //enum Colour {Red, Blue, Green, Yellow}; + //HRESULT SetPaletteEntries(Colour colour); + +}; // CCameraStream + + diff --git a/winsource/winsource.rc b/winsource/winsource.rc new file mode 100644 index 0000000..ae46255 --- /dev/null +++ b/winsource/winsource.rc @@ -0,0 +1,88 @@ +//Microsoft Developer Studio generated resource script. 
+// +// Version include +//#include + +#include "resource.h" + +#define APSTUDIO_READONLY_SYMBOLS +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 2 resource. +// +#include "windows.h" + +///////////////////////////////////////////////////////////////////////////// +#undef APSTUDIO_READONLY_SYMBOLS + +///////////////////////////////////////////////////////////////////////////// +// English (U.S.) resources + +#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) +#ifdef _WIN32 +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US +#pragma code_page(1252) +#endif //_WIN32 + +#ifdef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// TEXTINCLUDE +// + +1 TEXTINCLUDE DISCARDABLE +BEGIN + "resource.h\0" +END + +2 TEXTINCLUDE DISCARDABLE +BEGIN + "#include ""windows.h""\r\n" + "\0" +END + +3 TEXTINCLUDE DISCARDABLE +BEGIN + "\r\n" + "\0" +END + +#endif // APSTUDIO_INVOKED + + +///////////////////////////////////////////////////////////////////////////// +// +// String Table +// + +STRINGTABLE DISCARDABLE +BEGIN + IDS_TITLE "Bouncing Ball" +END + +#endif // English (U.S.) resources +///////////////////////////////////////////////////////////////////////////// + +// +// Version Info +// +#define VERSION_RES_BIN_NAME "Ball.dll\0" +#define VERSION_RES_BIN_DESCRIPTION "Bouncing Ball Filter (Sample)\0" + +#define AMOVIE_SELF_REGISTER + +//#include + + + + +#ifndef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 3 resource. 
+// + + +///////////////////////////////////////////////////////////////////////////// +#endif // not APSTUDIO_INVOKED + diff --git a/winsource/winsource.sln b/winsource/winsource.sln new file mode 100644 index 0000000..7472e50 --- /dev/null +++ b/winsource/winsource.sln @@ -0,0 +1,26 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winsource", "winsource.vcxproj", "{9D3C9114-5067-45E6-B83D-12D31EF86297}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Debug|x64 = Debug|x64 + Release|Win32 = Release|Win32 + Release|x64 = Release|x64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|Win32.ActiveCfg = Debug|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|Win32.Build.0 = Debug|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|x64.ActiveCfg = Debug|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|x64.Build.0 = Debug|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|Win32.ActiveCfg = Release|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|Win32.Build.0 = Release|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|x64.ActiveCfg = Release|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|x64.Build.0 = Release|x64 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/winsource/winsource.vcxproj b/winsource/winsource.vcxproj new file mode 100644 index 0000000..142c9e5 --- /dev/null +++ b/winsource/winsource.vcxproj @@ -0,0 +1,208 @@ + + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {9D3C9114-5067-45E6-B83D-12D31EF86297} + winsource + Win32Proj + + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + + + + + + + + + + + 
+ + + + + + <_ProjectFileVersion>10.0.40219.1 + Debug\ + Debug\ + true + Release\ + Release\ + false + $(Platform)\$(Configuration)\ + $(Platform)\$(Configuration)\ + true + $(Platform)\$(Configuration)\ + $(Platform)\$(Configuration)\ + false + AllRules.ruleset + + + AllRules.ruleset + + + AllRules.ruleset + + + AllRules.ruleset + + + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(WindowsSdkDir)include;$(FrameworkSDKDir)\include; + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(WindowsSdkDir)include;$(FrameworkSDKDir)\include; + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses\Release;$(VCInstallDir)lib;$(VCInstallDir)atlmfc\lib;$(WindowsSdkDir)lib;$(FrameworkSDKDir)\lib + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses\Debug;$(VCInstallDir)lib;$(VCInstallDir)atlmfc\lib;$(WindowsSdkDir)lib;$(FrameworkSDKDir)\lib + + + + Disabled + ..\..\BaseClasses\;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + + Level3 + EditAndContinue + StdCall + + + strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) + %(AdditionalLibraryDirectories) + true + winsource.def + true + Windows + MachineX86 + + + + + MaxSpeed + ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) + MultiThreadedDLL + + + Level3 + ProgramDatabase + StdCall + + + strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) + ..\..\BaseClasses\;%(AdditionalLibraryDirectories) + true + winsource.def + true + Windows + true + true + MachineX86 + + + + + X64 + + + Disabled + ..\..\BaseClasses\;%(AdditionalIncludeDirectories) + 
WIN32;_DEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + + Level3 + ProgramDatabase + StdCall + + + strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) + ..\..\BaseClasses\x64\Debug\;%(AdditionalLibraryDirectories) + true + winsource.def + true + Windows + MachineX64 + + + + + X64 + + + MaxSpeed + ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) + MultiThreadedDLL + + + Level3 + ProgramDatabase + StdCall + + + strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) + ..\..\BaseClasses\x64\release;%(AdditionalLibraryDirectories) + true + winsource.def + true + Windows + true + true + MachineX64 + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/wintest/wintest.sln b/wintest/wintest.sln new file mode 100644 index 0000000..d1b9159 --- /dev/null +++ b/wintest/wintest.sln @@ -0,0 +1,20 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "wintest", "wintest\wintest.vcxproj", "{6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Release|Win32 = Release|Win32 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Debug|Win32.ActiveCfg = Debug|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Debug|Win32.Build.0 = Debug|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Release|Win32.ActiveCfg = Release|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Release|Win32.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/wintest/wintest/ReadMe.txt b/wintest/wintest/ReadMe.txt new file mode 100644 index 0000000..e69e676 
--- /dev/null +++ b/wintest/wintest/ReadMe.txt @@ -0,0 +1,40 @@ +======================================================================== + CONSOLE APPLICATION : wintest Project Overview +======================================================================== + +AppWizard has created this wintest application for you. + +This file contains a summary of what you will find in each of the files that +make up your wintest application. + + +wintest.vcxproj + This is the main project file for VC++ projects generated using an Application Wizard. + It contains information about the version of Visual C++ that generated the file, and + information about the platforms, configurations, and project features selected with the + Application Wizard. + +wintest.vcxproj.filters + This is the filters file for VC++ projects generated using an Application Wizard. + It contains information about the association between the files in your project + and the filters. This association is used in the IDE to show grouping of files with + similar extensions under a specific node (for e.g. ".cpp" files are associated with the + "Source Files" filter). + +wintest.cpp + This is the main application source file. + +///////////////////////////////////////////////////////////////////////////// +Other standard files: + +StdAfx.h, StdAfx.cpp + These files are used to build a precompiled header (PCH) file + named wintest.pch and a precompiled types file named StdAfx.obj. + +///////////////////////////////////////////////////////////////////////////// +Other notes: + +AppWizard uses "TODO:" comments to indicate parts of the source code you +should add to or customize. + +///////////////////////////////////////////////////////////////////////////// diff --git a/wintest/wintest/wintest.cpp b/wintest/wintest/wintest.cpp new file mode 100644 index 0000000..c89054e --- /dev/null +++ b/wintest/wintest/wintest.cpp @@ -0,0 +1,19 @@ +// wintest.cpp : Defines the entry point for the console application. 
+// + +//#include "stdafx.h" +#include "../../mfvideoout.h" +#include + +int main(int argc, char* argv[]) +{ + class MfVideoOut mfVideoOut("test"); + + while(1) + { + Sleep(100); + } + + return 0; +} + diff --git a/wintest/wintest/wintest.vcxproj b/wintest/wintest/wintest.vcxproj new file mode 100644 index 0000000..784042e --- /dev/null +++ b/wintest/wintest/wintest.vcxproj @@ -0,0 +1,90 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD} + Win32Proj + wintest + + + + Application + true + Unicode + + + Application + false + true + Unicode + + + + + + + + + + + + + true + + + false + + + + NotUsing + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + + + Console + true + kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);Mfplat.lib;Mf.lib;Mfreadwrite.lib + + + + + Level3 + NotUsing + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + + + Console + true + true + true + kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);Mfplat.lib;Mf.lib;Mfreadwrite.lib + + + + + + + + + + + + + + + + + \ No newline at end of file