diff --git a/CMakeLists.txt b/CMakeLists.txt index 68b52f9..799b198 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,7 @@ project (FicTrac) # The version number. set (FICTRAC_VERSION_MAJOR 2) -set (FICTRAC_VERSION_MINOR 0) +set (FICTRAC_VERSION_MINOR 2) # output version info to be included by project configure_file ( @@ -13,7 +13,7 @@ configure_file ( # dependency search dirs set(OPENCV_DIR "." CACHE PATH "Path to OpenCV folder containing OpenCVConfig.cmake") -set(NLOPT_DIR "." CACHE PATH "Path to NLOpt folder containing libnlopt-0.lib") +set(NLOPT_DIR "." CACHE PATH "Path to NLopt folder containing NLoptConfig.cmake") # output dirs set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib) @@ -22,43 +22,51 @@ set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/bin) # optional build config option(PGR_USB3 "Use Spinnaker SDK to capture from PGR USB3 cameras" OFF) # Disabled by default +option(PGR_USB2 "Use FlyCapture SDK to capture from PGR USB2 cameras" OFF) # Disabled by default if(PGR_USB3) set(PGR_DIR "." CACHE PATH "Path to PGR Spinnaker SDK folder") +elseif(PGR_USB2) + set(PGR_DIR "." CACHE PATH "Path to PGR FlyCapture SDK folder") endif() # find dependencies set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${OPENCV_DIR} ${NLOPT_DIR}) +find_package(OpenCV REQUIRED) +find_package(NLopt CONFIG REQUIRED) +if(NLopt_FOUND) + get_filename_component(NLopt_INCLUDE_DIRS "${NLOPT_CONFIG_FILE}/../../../include" REALPATH) + # set(NLopt_LIBS ${NLOPT_LIBRARY_DIRS}/lib/nlopt.lib) + message(STATUS "Found NLopt: ${NLOPT_CONFIG_FILE}") +else() + message(FATAL_ERROR "Error! Could not find NLopt lib at ${NLOPT_LIB}!") +endif() if(MSVC) - find_package(opencv) - find_library(NLOPT_LIB libnlopt-0.lib) - if(PGR_USB3) set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${PGR_DIR}/lib64/vs2015) find_library(PGR_LIB Spinnaker_v140.lib) + elseif(PGR_USB2) + set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${PGR_DIR}/lib64/vs2015) + find_library(PGR_LIB FlyCapture2_v140.lib) endif() else() # gcc - find_package(OpenCV) - find_library(NLOPT_LIB libnlopt.a) - if(PGR_USB3) set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${PGR_DIR}) find_library(PGR_LIB libSpinnaker.so) + elseif(PGR_USB2) + set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${PGR_DIR}) + find_library(PGR_LIB libflycapture.so) endif() endif() -get_filename_component(NLOPT_DIR ${NLOPT_LIB} DIRECTORY) -if(NLOPT_LIB) - message(STATUS "Found NLOpt lib ${NLOPT_LIB}") +if(PGR_USB2 OR PGR_USB3) + get_filename_component(PGR_DIR ${PGR_LIB} DIRECTORY) + get_filename_component(PGR_DIR ${PGR_DIR} DIRECTORY) # step up 1 level if(MSVC) - message(STATUS "You might need to add ${NLOPT_DIR} to your PATH to be able to run the executable.") + get_filename_component(PGR_DIR ${PGR_DIR} DIRECTORY) # step up 1 level endif() -else() - message(FATAL_ERROR "Error! Could not find NLOpt lib at ${NLOPT_DIR}!") -endif() -if(PGR_USB3) if(PGR_LIB) - message(STATUS "Found PGR Spinnaker lib ${PGR_LIB}") + message(STATUS "Found PGR FlyCapture/Spinnaker lib ${PGR_LIB}") else() - message(FATAL_ERROR "Error! Could not find PGR Spinnaker lib at ${PGR_DIR}!") + message(FATAL_ERROR "Error! 
Could not find PGR FlyCapture/Spinnaker lib at ${PGR_DIR}!") endif() endif() @@ -66,75 +74,90 @@ endif() find_package(cppzmq) # add include dirs -include_directories(${PROJECT_SOURCE_DIR}/include ${OpenCV_INCLUDE_DIRS} ${NLOPT_DIR}) -if(PGR_USB3) - include_directories(${PGR_DIR}/include) - include_directories(${PGR_DIR}/include/spinnaker) +include_directories(${PROJECT_SOURCE_DIR}/include ${OpenCV_INCLUDE_DIRS} ${NLopt_INCLUDE_DIRS}) +if(PGR_USB2 OR PGR_USB3) + if(MSVC) + include_directories(${PGR_DIR}/include) + else() + if(PGR_USB2) + include_directories(${PGR_DIR}/include/flycapture) # for ubuntu default install dir + elseif(PGR_USB3) + include_directories(${PGR_DIR}/include/spinnaker) # for ubuntu default install dir + endif() + endif() endif() # find sources to build file(GLOB LIBFICTRAC_SRCS ${PROJECT_SOURCE_DIR}/src/*.cpp) # add targets -add_library(libfictrac STATIC ${LIBFICTRAC_SRCS}) +add_library(fictrac_core STATIC ${LIBFICTRAC_SRCS}) add_executable(configGui ${PROJECT_SOURCE_DIR}/exec/configGui.cpp) add_executable(fictrac ${PROJECT_SOURCE_DIR}/exec/fictrac.cpp) # add preprocessor definitions # public means defs will be inherited by linked executables -target_compile_definitions(libfictrac PUBLIC _CRT_SECURE_NO_WARNINGS NOMINMAX) -if(PGR_USB3) - target_compile_definitions(libfictrac PUBLIC PGR_USB3) +target_compile_definitions(fictrac_core PUBLIC _CRT_SECURE_NO_WARNINGS NOMINMAX) +if(PGR_USB2) + target_compile_definitions(fictrac_core PUBLIC PGR_USB2) +elseif(PGR_USB3) + target_compile_definitions(fictrac_core PUBLIC PGR_USB3) endif() # add compile options if(MSVC) - target_compile_options(libfictrac PUBLIC $<$:/MP /GS /GL /W3 /WX- /Gy /Zc:wchar_t /O2 /Oi /Zc:inline /fp:precise /MD /EHsc>) + target_compile_options(fictrac_core PUBLIC $<$:/MP /GS /GL /W3 /WX- /Gy /Zc:wchar_t /O2 /Oi /Zc:inline /fp:precise /MD /EHsc /std:c++17>) + # set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /LTCG") else() # gcc - target_compile_options(libfictrac PUBLIC -Ofast -Wall -c -fmessage-length=0 -std=c++14 -Wno-unused-function -march=native -MMD) + target_compile_options(fictrac_core PUBLIC -Ofast -Wall -c -fmessage-length=0 -std=c++17 -Wno-unused-function -march=native -MMD) endif() -# linking and post-build -target_link_libraries(libfictrac PUBLIC ${OpenCV_LIBS} ${NLOPT_LIB} cppzmq) -if(PGR_USB3) - target_link_libraries(libfictrac PUBLIC ${PGR_LIB}) +# linking +target_link_libraries(fictrac_core PUBLIC ${OpenCV_LIBS} ${NLOPT_LIBRARIES} cppzmq) +if(MSVC) + target_link_libraries(fictrac_core PUBLIC Ws2_32) +else() # gcc + target_link_libraries(fictrac_core PUBLIC pthread) endif() +if(PGR_USB2 OR PGR_USB3) + target_link_libraries(fictrac_core PUBLIC ${PGR_LIB}) +endif() + +# post-build if(MSVC) - # win-specific libraries - target_link_libraries(libfictrac PUBLIC Ws2_32) - # copy all opencv dlls set(OPENCV_VER_STRING ${OpenCV_VERSION_MAJOR}${OpenCV_VERSION_MINOR}${OpenCV_VERSION_PATCH}) foreach(lib ${OpenCV_LIBS}) - if(EXISTS "${_OpenCV_LIB_PATH}/${lib}${OPENCV_VER_STRING}.dll") - list(APPEND TO_COPY "${_OpenCV_LIB_PATH}/${lib}${OPENCV_VER_STRING}.dll" "${_OpenCV_LIB_PATH}/${lib}${OPENCV_VER_STRING}d.dll") + if(EXISTS "${OpenCV_INSTALL_PATH}/bin/${lib}${OPENCV_VER_STRING}.dll") + list(APPEND TO_COPY "${OpenCV_INSTALL_PATH}/bin/${lib}${OPENCV_VER_STRING}.dll") endif() endforeach() - set(FFMPEG_LIB_BASE opencv_ffmpeg${OPENCV_VER_STRING}) - if(${OpenCV_ARCH} STREQUAL x64) - set(FFMPEG_LIB ${FFMPEG_LIB_BASE}_64.dll) - else() - set(FFMPEG_LIB ${FFMPEG_LIB_BASE}.dll) - endif() - 
list(APPEND TO_COPY "${_OpenCV_LIB_PATH}/${FFMPEG_LIB}") + # set(FFMPEG_LIB_BASE opencv_ffmpeg${OPENCV_VER_STRING}) + # if("${OpenCV_ARCH}" STREQUAL x86) + # set(FFMPEG_LIB ${FFMPEG_LIB_BASE}.dll) + # else() + # # default to 64-bit + # set(FFMPEG_LIB ${FFMPEG_LIB_BASE}_64.dll) + # endif() + # list(APPEND TO_COPY "${OpenCV_INSTALL_PATH}/bin/${FFMPEG_LIB}") # copy nlopt dll - list(APPEND TO_COPY "${NLOPT_DIR}/libnlopt-0.dll") + # list(APPEND TO_COPY "${NLOPT_LIBRARY_DIRS}/bin/nlopt.dll") # copy h264 dll - list(APPEND TO_COPY "${PROJECT_SOURCE_DIR}/dll/openh264-1.7.0-win64.dll") + file(GLOB DLLS ${PROJECT_SOURCE_DIR}/dll/*.dll) + list(APPEND TO_COPY "${DLLS}") add_custom_command( TARGET fictrac POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different ${TO_COPY} "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/$<$:Release>$<$:Debug>/") else() # gcc - # socket libs, pthread - target_link_libraries(libfictrac PUBLIC pthread) + # nothing here... endif() -target_link_libraries(configGui libfictrac) -add_dependencies(configGui libfictrac) -target_link_libraries(fictrac libfictrac) -add_dependencies(fictrac libfictrac) +target_link_libraries(configGui fictrac_core) +add_dependencies(configGui fictrac_core) +target_link_libraries(fictrac fictrac_core) +add_dependencies(fictrac fictrac_core) diff --git a/README.md b/README.md index f80ec02..b3da23b 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@

-**FicTrac** is an open source software library and set of executables for reconstructing the fictive path of an animal walking on a patterned sphere. The software is fast, flexible, and easy to use and simplifies the setup of closed-loop tracking experiments. +**FicTrac** is an open-source software library for reconstructing the fictive path of an animal walking on a patterned sphere. The software is fast, flexible, and easy to use, and it simplifies the setup of closed-loop tracking experiments. FicTrac was originally developed by researchers at the [Queensland Brain Institute](http://qbi.uq.edu.au/) at the University of Queensland, Australia for tracking honeybees and fruit flies during closed-loop tethered walking experiments, but it has since proved useful for tracking a wide range of animals with different movement speeds, track ball diameters and patterns, and stimuli. @@ -14,18 +14,18 @@ On this page you'll find information for: You might also be interested in the following links: * [Demo video](http://youtu.be/BeGYOEOdWjw) - Quick (30s) overview of what FicTrac does and how it works. -* [FicTrac manual](http://link) - Detailed instructions, description of output data, parameters, recommendations, etc. +* [FicTrac manual](doc/requirements.md) - Detailed instructions, description of output data, parameters, recommendations, etc. * [Homepage](http://fictrac.rjdmoore.net) - Contact details for the main author/developer, links, and further info. -* [Forum](http://www.reddit.com/r/fictrac/) - Subreddit for FicTrac users to share issues and advice. +* [Forum](http://www.reddit.com/r/fictrac/) - Subreddit for FAQs, support, advice, discussions, etc. * [Mailing list](http://fictrac.rjdmoore.net/mail.html) - Subscribe to receive important announcements and updates. Happy tracking! ## Getting started -If you're just setting up your lab, or wondering whether FicTrac is suitable for your setup (spoiler: yes, probably), check the [section below](#hardware-requirements) for the basic requirements. +If you're just setting up your lab, or wondering whether FicTrac is suitable for your setup (spoiler: yes, probably), check the [hardware requirements section below](#hardware-requirements) for the basic requirements. -If you already have an experimental enclosure with a camera, you can use FicTrac to either process those videos offline or to run live from the camera. Just follow the sections below to [install](#installation), [configure](#configuration), and [run FicTrac](#running-fictrac). +If you already have an experimental enclosure with a camera, you can use FicTrac to either process recorded videos offline or to run live from the camera. Skip ahead to [install](#installation), [configure](#configuration), and [run FicTrac](#running-fictrac). ### Hardware requirements @@ -44,130 +44,103 @@ FicTrac imposes no requirements on the *italicised* items; how you design these ### Installation -The FicTrac source code can be built for both Windows and Ubuntu (Linux) operating systems. You can even build and run FicTrac from within a [virtual machine](https://www.virtualbox.org/) on any operating system. +The FicTrac source code can be built for both Windows and Linux (e.g. Ubuntu) operating systems, or you can build and run FicTrac from within a [virtual machine](https://www.virtualbox.org/) on any operating system. The following instructions are for a 64-bit machine; if you are using a 32-bit machine, you will need to replace x64 with x86 in the instructions below.
-**Note:** If you plan on using a USB3 camera, FicTrac may have issues using the OpenCV capture interface. The work around is to tell FicTrac to use the SDK provided with your camera instead of OpenCV to do the frame grabbing. See [USB3 camera installation](#usb3-camera-installation). +1. Download and install required build tools and dependencies: + 1. Windows only: + 1. [Cmake build system](https://cmake.org/download/) (Windows win64-x64 Installer) + 2. If you don't already have Visual Studio (C++ workflow) installed, you will need to install the [Build Tools for Visual Studio](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2017). + 2. Linux (Ubuntu) only: + 1. Run the following from terminal to install necessary build tools and dependencies: ```[Linux] sudo apt-get install gcc git cmake curl unzip tar yasm pkg-config libgtk2.0-dev libavformat-dev libavcodec-dev libavresample-dev libswscale-dev``` + 3. (Windows and Linux) Clone or download the [Vcpkg](https://github.com/Microsoft/vcpkg) repository and then follow the guide to install (make sure to perform the bootstrap and integration steps). + 4. Using Vcpkg, install OpenCV, NLopt, and Boost::asio software packages (this may take 10-30 mins): -#### Windows installation +``` +[Windows] .\vcpkg install opencv[ffmpeg]:x64-windows nlopt:x64-windows boost-asio:x64-windows ffmpeg[*]:x64-windows cppzmq:x64-windows +[Linux] ./vcpkg install opencv[ffmpeg]:x64-linux nlopt:x64-linux boost-asio:x64-linux ffmpeg[*]:x64-linux cppzmq:x64-linux +``` -1. Download and install required dependencies: - 1. [Cmake build system](https://cmake.org/download/) (binary distribution) - 2. [OpenCV computer vision library](https://opencv.org/releases.html) (latest release Win pack) - 3. [NLopt optimisation library](https://nlopt.readthedocs.io/en/latest/NLopt_on_Windows/) (precompiled DLL) 2. Clone or download the FicTrac repository, then navigate to that folder, open a terminal, and create a build directory: ``` mkdir build cd build ``` -3. Next, we will configure and build the FicTrac project. FicTrac is written in C++, so you'll need a suitable compiler. In this example we will use MSVS Build Tools. If you don't already have Visual Studio, you will need to install the [build tools](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2017). -4. Run Cmake to prepare the necessary build files for FicTrac. Here we also need to provide the paths to where we installed OpenCV and NLopt (I have given example paths here, you will need to modify them for your installation): +3. Run Cmake to prepare the necessary build files for FicTrac. Here, we will need to provide the path to the Cmake toolchain file that was installed by Vcpkg (this path is printed to terminal when you run the Vcpkg system-wide integration step). ``` -cmake -G "Visual Studio 15 2017 Win64" -D OPENCV_DIR="C:\path\to\opencv-3.4.2\build" -D NLOPT_DIR="C:\path\to\nlopt-2.4.2\" .. +[Windows] cmake -G "Visual Studio 15 2017 Win64" -D CMAKE_TOOLCHAIN_FILE=C:\path\to\vcpkg\scripts\buildsystems\vcpkg.cmake .. +[Linux] cmake -D CMAKE_TOOLCHAIN_FILE=/path/to/vcpkg/scripts/buildsystems/vcpkg.cmake .. ``` 5. Finally, build and install FicTrac: ``` -cmake --build . --config Release --target ALL_BUILD +[Windows] cmake --build . --config Release -j 4 +[Linux] cmake --build . --config Release -- -j 4 ``` If everything went well, the executables for FicTrac and a configuration utility will be placed under the `bin` directory in the FicTrac project folder. 
-**Note:** To save video on Windows, you must also have the [H264 library](https://github.com/cisco/openh264/releases) in the system path. OpenCV 3.4.2 requires `openh264-1.7.0-win64.dll`, which should be downloaded and placed in the same directory as the generated `fictrac.exe`. If you have installed another version of OpenCV, and it requires another version of the H264 library, an error message should be printed to the terminal when you run FicTrac. You can obtain other H264 versions from the above link. +Remember to update and re-build FicTrac occasionally, as the program is still under development and fixes and improvements are being made continuously. -#### Ubuntu (Linux) installation +| | | | | | +| --- | --- | --- | --- | --- | +| Build status | Windows | [![Build Status](https://dev.azure.com/rjdmoore/FicTrac/_apis/build/status/rjdmoore.fictrac?branchName=master&jobName=Windows)](https://dev.azure.com/rjdmoore/FicTrac/_build/latest?definitionId=1&branchName=master) | Linux | [![Build Status](https://dev.azure.com/rjdmoore/FicTrac/_apis/build/status/rjdmoore.fictrac?branchName=master&jobName=Linux)](https://dev.azure.com/rjdmoore/FicTrac/_build/latest?definitionId=1&branchName=master) | -1. Install the required dependencies: -``` -sudo apt-get install gcc cmake libavcodec-dev libavformat-dev libswscale-dev libv4l-dev libgtk-3-dev libdc1394-22-dev libopencv-dev libnlopt-dev -``` -2. Clone or download the FicTrac repository, then navigate to that folder and create a build directory: -``` -mkdir build -cd build -``` -3. Run Cmake to prepare the necessary build files for FicTrac (if OpenCV and NLopt are not installed in the default location, you can help Cmake find them by defining OPENCV_DIR and NLOPT_DIR - see [Windows installation](#windows-installation) for an example): -``` -cmake .. -``` -4. Finally, build and install FicTrac: -``` -make -j4 -``` +#### USB2/3 camera installation -If everything went well, the executables for FicTrac and a configuration utility will be placed under the `bin` directory in the FicTrac project folder. +If you are using an industrial USB2/3 camera and are receiving error messages when FicTrac tries to connect to your camera, you may need to tell FicTrac to use the SDK provided with your camera, rather than the generic OpenCV interface. The instructions for switching to the camera's SDK are different for each manufacturer. Currently there is support for PGR (FLIR) USB2/3 cameras via the Flycapture/Spinnaker SDK. -#### USB3 camera installation ##### PGR (FLIR) Flycapture SDK -If you are using a USB3 camera and are receiving error messages when FicTrac tries to connect to your camera, you may need to tell FicTrac to use the SDK provided with your camera, rather than the generic OpenCV interface. The instructions for switching to the camera's SDK are different for each manufacturer. Currently there is support for PGR (FLIR) USB3 cameras via the Spinnaker SDK. +1. Download and install the latest [Flycapture SDK](https://www.flir.com/products/flycapture-sdk/). +2. When preparing the build files for FicTrac using Cmake, you will need to enable Flycapture using the switch `-D PGR_USB2=ON`, and depending on where you installed the SDK, you may also need to provide the SDK directory path using the switch `-D PGR_DIR=...`. For example, for a Windows installation you would replace step 3 above with: +``` +cmake -G "Visual Studio 15 2017 Win64" -D CMAKE_TOOLCHAIN_FILE=C:\path\to\vcpkg\scripts\buildsystems\vcpkg.cmake -D PGR_USB2=ON -D PGR_DIR="C:\path\to\Flycapture" ..
+``` +3. Follow the other build steps as normal. + +Before running FicTrac, you may configure your camera (frame rate, resolution, etc) as desired using the SDK utilities. ##### PGR (FLIR) Spinnaker SDK -1. Download and install the Spinnaker SDK from [PGR downloads page](https://www.ptgrey.com/support/downloads). First select your camera model and operating system, and then download and install the latest Spinnaker SDK release. -2. When preparing the build files for FicTrac using Cmake, you will need to specify to use Spinnaker using the switch `-D PGR_USB3=ON` and depending on where you installed the SDK, you may also need to provide the SDK directory path using the switch `-D PGR_DIR=...`. For example, for a [Windows installation](#windows-installation) you would replace step 4 with: +1. Download and install the latest [Spinnaker SDK](https://www.flir.com/products/spinnaker-sdk/). +2. When preparing the build files for FicTrac using Cmake, you will need to enable Spinnaker using the switch `-D PGR_USB3=ON`, and depending on where you installed the SDK, you may also need to provide the SDK directory path using the switch `-D PGR_DIR=...`. For example, for a Windows installation you would replace step 3 above with: ``` -cmake -G "Visual Studio 15 2017 Win64" -D OPENCV_DIR="C:\path\to\opencv-3.4.2\build" -D NLOPT_DIR="C:\path\to\nlopt-2.4.2\" -D PGR_USB3=ON -D PGR_DIR="C:\path\to\Spinnaker" .. +cmake -G "Visual Studio 15 2017 Win64" -D CMAKE_TOOLCHAIN_FILE=C:\path\to\vcpkg\scripts\buildsystems\vcpkg.cmake -D PGR_USB3=ON -D PGR_DIR="C:\path\to\Spinnaker" .. ``` -3. Follow the other build steps for either [Windows](#windows-installation) or [Ubuntu (Linux)](#ubuntu-linux-installation) as normal. +3. Follow the other build steps as normal. Before running FicTrac, you may configure your camera (frame rate, resolution, etc) as desired using the SDK utilities. ### Configuration -There are two neccessary steps to configure FicTrac prior to running the program: -1. You must provide a configuration text file that contains important parameters for your setup. At a minimum, this config file must define the parameters `src_fn` and `vfov`, which are the path to the image source (video file or camera) and vertical field of view (in degrees) of your camera/lens respectively. If you are running live from the camera, then `src_fn`is the camera index (e.g. 0). **The vertical field of view for your camera/lens must be specified accurately.** If `vfov` is incorrect, the surface map created by FicTrac will not wrap around the sphere correctly, and tracking will fail. An example config file is provided in the `sample` directory under the FicTrac project folder; you can use this file as a template to write your own config file. -2. You must run the interactive configuration program (configGui), passing the path to your config file (above) as an argument. This program will guide you through the configuration of the track ball region of interest within your input images and the transformation between the camera's and animal's frames of reference. **It is important that the camera is not moved after running the configuration utility. If the camera is moved, you must reconfigure.** After running the configuration utility, your config file will have some additional default parameters added automatically. +There are two necessary steps to configure FicTrac prior to running the program: +1. You must provide a text file that contains important [configuration parameters](doc/params.md) for your setup.
At a minimum, this config file must define the parameters `src_fn` and `vfov`, which define the image source (path to video file or camera index) and vertical field of view (in degrees) of your camera respectively. You will find an example config file in the `sample` directory. +2. You must run the interactive configuration program (configGui). This program will guide you through the configuration of the track ball region of interest within your input images and the transformation between the camera's and animal's frames of reference. -A more detailed guide for configuring FicTrac for your setup, as well as a complete list and explanation of parameters that can be specified, can be found in the [FicTrac manual](). - -The commands for running the configuration utility under Windows and Ubuntu (Linux) are almost identical. Simply open a terminal in the FicTrac project folder and type: - -#### Windows -``` -.\bin\Release\configGui.exe path\to\config.txt -``` - -#### Ubuntu (Linux) -``` -./bin/configGui path/to/config.txt -``` +A more [detailed guide](doc/requirements.md) on how to configure FicTrac for your setup and an explanation of all the [configuration parameters](doc/params.md) can be found in the `doc` directory. ### Running FicTrac -Once you have configured FicTrac for your setup, you may run FicTrac simply by opening a terminal in the FicTrac project folder and typing: - -#### Windows +To configure FicTrac for the provided sample data, simply open a terminal in the FicTrac project folder and type: ``` -.\bin\Release\fictrac.exe path\to\config.txt +cd sample +[Windows] ..\bin\Release\configGui.exe config.txt +[Linux] ../bin/configGui config.txt ``` +The sample config file `config.txt` is already configured for the sample data, but you can step through the configuration process to check that everything looks ok. -#### Ubuntu (Linux) -We execute as super user so that we can set the FicTrac process to higher priority than other system processes. +Then, to run FicTrac, type: ``` -sudo ./bin/fictrac path/to/config.txt +[Windows] ..\bin\Release\fictrac.exe config.txt +[Linux] sudo ../bin/fictrac config.txt ``` -### Process sample data +FicTrac will usually generate two output files: +1. Log file (*.log) - containing debugging information about FicTrac's execution. +2. Data file (*.dat) - containing output data. See [data_header](doc/data_header.txt) for information about output data. -After you have build FicTrac, you can process the sample data set to make sure everything is running fine. Simply open a terminal in the FicTrac project folder and type: - -#### Windows -``` -cd sample -..\bin\Release\configGui.exe config.txt -``` -The sample config file `config.txt` is already configured for the sample data, but you can step through the configuration process to check that everything looks ok. Then, in the same terminal window, type: -``` -..\bin\Release\fictrac.exe config.txt -``` +The output data file can be used for offline processing. To use FicTrac within a closed-loop setup (to provide real-time feedback for stimuli), you should configure FicTrac to output data via a socket (IP address/port) in real-time. To do this, just set `sock_port` to a valid port number in the config file. There is an example Python script for receiving data via sockets in the `scripts` directory.
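For closed-loop use, a minimal client can read these records straight from the socket. The sketch below is illustrative only and rests on assumptions not confirmed by this README: FicTrac is taken to be the listening side (as the SocketRecorder interface later in this diff suggests), the port number 1111 is hypothetical, each record is assumed to arrive as one newline-terminated, comma-separated line with the 23 columns listed in doc/data_header.txt, and the code is POSIX-only (a Windows client would use Winsock instead).

```cpp
// Illustrative sketch, not part of FicTrac: connect to a FicTrac instance that is
// assumed to listen on localhost:1111 (hypothetical sock_port value) and to send
// newline-terminated, comma-separated records (columns as in doc/data_header.txt).
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

#include <cstdio>
#include <cstdlib>
#include <sstream>
#include <string>
#include <vector>

int main()
{
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    if (fd < 0) { std::perror("socket"); return 1; }

    sockaddr_in addr{};
    addr.sin_family = AF_INET;
    addr.sin_port = htons(1111);                    // hypothetical sock_port value
    inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);
    if (connect(fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0) {
        std::perror("connect");
        return 1;
    }

    std::string buf;
    char chunk[1024];
    ssize_t n;
    while ((n = recv(fd, chunk, sizeof(chunk), 0)) > 0) {
        buf.append(chunk, static_cast<size_t>(n));
        size_t eol;
        while ((eol = buf.find('\n')) != std::string::npos) {
            std::string line = buf.substr(0, eol);
            buf.erase(0, eol + 1);

            // split the comma-separated record into numeric columns
            std::vector<double> cols;
            std::stringstream ss(line);
            std::string tok;
            while (std::getline(ss, tok, ',')) {
                cols.push_back(std::atof(tok.c_str()));
            }
            // columns 15-16 (1-indexed) are the integrated x/y position in radians,
            // assuming the record matches doc/data_header.txt exactly
            if (cols.size() >= 16) {
                std::printf("x = %.3f  y = %.3f\n", cols[14], cols[15]);
            }
        }
    }
    close(fd);
    return 0;
}
```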
-#### Ubuntu (Linux) -``` -cd sample -../bin/configGui config.txt -``` -The sample config file `config.txt` is already configured for the sample data, but you can step through the configuration process to check that everything looks ok. Then, in the same terminal window, type: -``` -sudo ../bin/Release/fictrac.exe config.txt -``` +**Note:** If you encounter issues trying to generate output videos (i.e. `save_raw` or `save_debug`), you might try changing the default video codec via `vid_codec` - see [config params](doc/params.md) for details. If you receive an error about a missing [H264 library](https://github.com/cisco/openh264/releases), you can download the necessary library (i.e. OpenCV 3.4.3 requires `openh264-1.7.0-win64.dll`) from the above link and place it in the `dll` folder under the FicTrac main directory. You will then need to re-run the appropriate `cmake ..` and `cmake --build` commands for your installation. ## Research diff --git a/TODO.txt b/TODO.txt deleted file mode 100644 index 8e65b40..0000000 --- a/TODO.txt +++ /dev/null @@ -1,20 +0,0 @@ -== Development == -* write win/linux socket clients - - -== Testing == -* benchmark different ball patterns -* test with diverse recordings -* benchmark with noisy simulations -* compare performance with FicTrac v1 -* test sockets - - -== Misc == -* update website -* write new manual - - -== Research == -* SLAM-like map with elastic weights between pixels to enable distribution of error on loop closures? -* weight view matching to lessen impact of bad pixels near horizon? diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..c002514 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,55 @@ +# Azure CI/CD pipeline config file +# https://aka.ms/yaml + +# Trigger builds on master branch +# https://docs.microsoft.com/en-us/azure/devops/pipelines/build/triggers?view=azure-devops&tabs=yaml + +trigger: +- master +- develop + +# We can run multiple jobs in parallel. +# see https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases +jobs: + +# Provide a name for the job +- job: Linux + # The VM image to use for the hosted agent. For a list of possible agents + # see https://docs.microsoft.com/en-us/azure/devops/pipelines/agents/hosted + # You can see the software installed on each agent at the same link. + pool: + vmImage: 'ubuntu-latest' + # The steps to run to execute the build. + steps: + #- script: echo "set(VCPKG_BUILD_TYPE release)" >> $VCPKG_INSTALLATION_ROOT/triplets/x64-linux.cmake + # displayName: vcpkg set build type + - script: vcpkg install opencv[ffmpeg]:x64-linux nlopt:x64-linux boost-asio:x64-linux + displayName: vcpkg install dependencies + - task: CMake@1 + inputs: + workingDirectory: build + cmakeArgs: -D CMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake .. + - task: CMake@1 + inputs: + workingDirectory: build + cmakeArgs: --build . --config Release + +# Provide a name for the job +- job: Windows + # The VM image to use for the hosted agent. For a list of possible agents + # see https://docs.microsoft.com/en-us/azure/devops/pipelines/agents/hosted + # You can see the software installed on each agent at the same link. + pool: + vmImage: 'vs2017-win2016' + # The steps to run to execute the build. 
+ steps: + - script: vcpkg install opencv[ffmpeg]:x64-windows nlopt:x64-windows boost-asio:x64-windows + displayName: vcpkg install dependencies + - task: CMake@1 + inputs: + workingDirectory: build + cmakeArgs: -D CMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -G "Visual Studio 15 2017 Win64" .. + - task: CMake@1 + inputs: + workingDirectory: build + cmakeArgs: --build . --config Release diff --git a/doc/data_header.txt b/doc/data_header.txt index 87d711a..053d897 100644 --- a/doc/data_header.txt +++ b/doc/data_header.txt @@ -1,41 +1,41 @@ - COL PARAMETER DESCRIPTION - 1 frame counter Corresponding video frame (starts at #1). - 2-4 delta rotation vector (cam) Change in orientation since last frame, - represented as rotation angle/axis (radians) - in camera coordinates (x right, y down, z - forward). - 5 delta rotation error score Error score associated with rotation - estimate. - 6-8 delta rotation vector (lab) Change in orientation since last frame, - represented as rotation angle/axis (radians) - in laboratory coordinates (see - *configImg.jpg). - 9-11 absolute rotation vector (cam) Absolute orientation of the sphere - represented as rotation angle/axis (radians) - in camera coordinates. - 12-14 absolute rotation vector (lab) Absolute orientation of the sphere - represented as rotation angle/axis (radians) - in laboratory coordinates. - 15-16 integrated x/y position (lab) Integrated x/y position (radians) in - laboratory coordinates. Scale by sphere - radius for true position (?). - 17 integrated animal heading (lab) Integrated heading orientation (radians) of - the animal in laboratory coordinates. This - is the direction the animal is facing. - 18 animal movement direction (lab) Instantaneous running direction (radians) of - the animal in laboratory coordinates. This is - the direction the animal is moving in the lab - frame (add to animal heading to get direction - in world). - 19 animal movement speed Instantaneous running speed (radians/frame) - of the animal. Scale by sphere radius for - true speed (?). - 20-21 integrated forward/side motion Integrated x/y position (radians) of the - sphere in laboratory coordinates neglecting - heading. Equivalent to the output from two - optic mice. - 22 timestamp Either position in video file (ms) or real - capture time for image frame. - 23 sequence counter Position in current frame sequence. Usually - corresponds directly to frame counter, but - can reset to 1 if tracking is reset. + COL PARAMETER DESCRIPTION + 1 frame counter Corresponding video frame (starts at #1). + 2-4 delta rotation vector (cam) Change in orientation since last frame, + represented as rotation angle/axis (radians) + in camera coordinates (x right, y down, z + forward). + 5 delta rotation error score Error score associated with rotation + estimate. + 6-8 delta rotation vector (lab) Change in orientation since last frame, + represented as rotation angle/axis (radians) + in laboratory coordinates (see + *configImg.jpg). + 9-11 absolute rotation vector (cam) Absolute orientation of the sphere + represented as rotation angle/axis (radians) + in camera coordinates. + 12-14 absolute rotation vector (lab) Absolute orientation of the sphere + represented as rotation angle/axis (radians) + in laboratory coordinates. + 15-16 integrated x/y position (lab) Integrated x/y position (radians) in + laboratory coordinates. Scale by sphere + radius for true position (?). 
+ 17 integrated animal heading (lab) Integrated heading orientation (radians) of + the animal in laboratory coordinates. This + is the direction the animal is facing. + 18 animal movement direction (lab) Instantaneous running direction (radians) of + the animal in laboratory coordinates. This is + the direction the animal is moving in the lab + frame (add to animal heading to get direction + in world). + 19 animal movement speed Instantaneous running speed (radians/frame) + of the animal. Scale by sphere radius for + true speed (?). + 20-21 integrated forward/side motion Integrated x/y position (radians) of the + sphere in laboratory coordinates neglecting + heading. Equivalent to the output from two + optic mice. + 22 timestamp Either position in video file (ms) or real + capture time for image frame. + 23 sequence counter Position in current frame sequence. Usually + corresponds directly to frame counter, but + can reset to 1 if tracking is reset. diff --git a/doc/params.md b/doc/params.md new file mode 100644 index 0000000..a69dafa --- /dev/null +++ b/doc/params.md @@ -0,0 +1,41 @@ +This document provides a brief overview of FicTrac's configuration parameters. Many of these parameters are set automatically by the configuration utility. See the main documentation for instructions on how to [configure FicTrac](README.md#Configuration) before use. + +In the table below, the various possible parameters are listed. If nothing is listed under `Default value` then the parameter must be specified by the user. The valid range may use [interval notation](https://en.wikipedia.org/wiki/Interval_(mathematics)). The `Should I touch it?` column should give you some idea of which params to play around with. + +| Param name | Param type | Default value | Valid range | Should I touch it? | Description | +|------------|------------|---------------|-------------|---------------------|-------------| +| src_fn | string OR int | | int=\[0,inf) | Yes, you have to | A string that specifies the path to the input video file, OR an integer that specifies which of several connected USB cameras to use. Paths can be absolute or relative to the working directory. | +| vfov | float | | (0,inf) | Yes, you have to | Vertical field of view of the input images in degrees. | +| | | | | | | +| do_display | bool | y | y/n | If you want to | Display debug screen during tracking. Slows execution very slightly. | +| save_debug | bool | n | y/n | If you want to | Record the debug screen to video file. Note that if the source frame rate is higher than FicTrac's processing frame rate, frames may be dropped from the video file. | +| save_raw | bool | n | y/n | If you want to | Record the input image stream to video file. Note that if the source frame rate is higher than FicTrac's processing frame rate, frames may be dropped from the video file. | +| sock_port | int | -1 | (0,inf) | If you want to | Socket port over which to transmit FicTrac data. If unset or < 0, FicTrac will not transmit data over sockets. | +| com_port | string | | | If you want to | Serial port over which to transmit FicTrac data. If unset, FicTrac will not transmit data over serial. | +| com_baud | int | 115200 | | If you want to | Baud rate to use for COM port. Unused if no com_port set. | +| | | | | | | +| fisheye | bool | n | y/n | Only if you need to | If set, FicTrac will assume the imaging system has a fisheye lens, otherwise a rectilinear lens is assumed. 
| +| q_factor | int | 6 | (0,inf) | Only if you need to | Adjusts the resolution of the tracking window. Smaller values correspond to coarser but quicker tracking and vice-versa. Normally in the range [3,10]. | +| src_fps | float | -1 | (0,inf) | Only if you need to | If set, FicTrac will attempt to set the frame rate for the image source (video file or camera). | +| max_bad_frames | int | -1 | (0,inf) | Only if you need to | If set, FicTrac will reset tracking after being unable to match this many frames in a row. Defaults to never resetting tracking. | +| opt_do_global | bool | n | y/n | Only if you need to | Perform a slow global search after max_bad_frames are reached. This may allow FicTrac to recover after a tracking fail, but should only be used when playing back from video file, as it is slow! | +| opt_max_err | float | -1 | \[0,inf) | Only if you need to | If set, specifies the maximum allowable matching error before declaring a bad frame (i.e. tracking fail). Matching error is printed to screen during tracking (err=...), and also output in the [data file](doc/data_header.txt) (delta rotation error score). If unset, FicTrac will never detect bad matches (tracking will fail silently). | +| thr_ratio | float | 1.25 | (0,inf) | Only if you need to | Adjusts the adaptive thresholding of the input image. Values > 1 will favour foreground regions (more white in thresholded image) and values < 1 will favour background regions (more black in thresholded image). | +| thr_win_pc | float | 0.2 | \[0,1] | Only if you need to | Adjusts the size of the neighbourhood window to use for adaptive thresholding of the input image, specified as a percentage of the width of the tracking window. Larger values avoid over-segmentation, whilst smaller values make segmentation more robust to illumination gradients on the trackball. | +| vid_codec | string | h264 | [h264,xvid,mpg4,mjpg,raw] | Only if you need to | Specifies the video codec to use when writing output videos (see `save_raw` and `save_debug`). | +| sphere_map_fn | string | | | Only if you need to | If specified, FicTrac will attempt to load a previously generated sphere surface map from this filename. | +| | | | | | | +| opt_max_evals | int | 50 | (0,inf) | Probably not | Specifies the maximum number of minimisation iterations to perform each frame. Smaller values may improve tracking frame rate at the risk of finding sub-optimal matches. Number of optimisation iterations is printed to screen during tracking (its=...). | +| opt_bound | float | 0.35 | (0,inf) | Probably not | Specifies the optimisation search range in radians. Larger values will facilitate more track ball rotation per frame, but result in slower tracking and also possibly lead to false matches. | +| opt_tol | float | 0.001 | (0,inf) | Probably not | Specifies the minimisation termination criteria for absolute change in input parameters (delta rotation vector). | +| | | | | | | +| c2a_cnrs_xy | vec\ | | | Set by ConfigGui | Specifies the corners {X1,Y1,X2,Y2,...} of a square shape aligned with the animal's XY axes. Set interactively in ConfigGUI. | +| c2a_cnrs_yz | vec\ | | | Set by ConfigGui | Specifies the corners {X1,Y1,X2,Y2,...} of a square shape aligned with the animal's YZ axes. Set interactively in ConfigGUI. | +| c2a_cnrs_xz | vec\ | | | Set by ConfigGui | Specifies the corners {X1,Y1,X2,Y2,...} of a square shape aligned with the animal's XZ axes. Set interactively in ConfigGUI. 
| +| c2a_src | string | | | Set by ConfigGui | Specifies which of the above corner sets is used to compute the camera-animal transform. Set interactively in ConfigGUI. | +| c2a_r | vec\ | | | Set by ConfigGui | Rotational component of the camera-animal transform. Computed automatically by ConfigGUI. | +| c2a_t | vec\ | | | Set by ConfigGui | Translational component of the camera-animal transform. Computed automatically by ConfigGUI. | +| roi_circ | vec\ | | | Set by ConfigGui | Specifies points {X1,Y1,X2,Y2,...} around the circumference of the trackball in the input image. Set interactively in ConfigGUI. | +| roi_c | vec\ | | | Set by ConfigGui | Camera-frame vector describing the centre point of the trackball in the input image. Computed automatically by ConfigGUI. | +| roi_r | float | | | Set by ConfigGui | Half-angle describing the radius of the trackball in the input image. Computed automatically by ConfigGUI. | +| roi_ignr | vec> | | | Set by ConfigGui | Specifies possibly several polygon regions {{X11,Y11,X12,Y12,...},{X21,Y21,X22,Y22,...},...} that should be ignored during matching (e.g. where the animal obscures the trackball). Set interactively in ConfigGUI. | diff --git a/doc/requirements.md b/doc/requirements.md index 003c8ef..6a645a2 100644 --- a/doc/requirements.md +++ b/doc/requirements.md @@ -53,7 +53,7 @@ FicTrac can be built for both Windows and Ubuntu (Linux) operating systems. Ther For best performance, the processor should be reasonably fast (>2 GHz) and should be multi-core (ideally 4+). FicTrac uses <1 GB RAM. -On a ~3.2 GHz quadcore processor processor, and with default configuration settings (`q_factor : 6`), FicTrac runs at ~220 FPS. At a quality setting, (`q_factor : 4`), on the same machine, FicTrac runs at ~450 FPS. +On a ~3.2 GHz quadcore processor processor, and with default configuration settings (`q_factor : 6`), FicTrac runs at ~220 FPS. At a quality setting `q_factor : 4` on the same machine, FicTrac runs at ~450 FPS. ### Lighting diff --git a/doc/ubuntu_setup.txt b/doc/ubuntu_setup.txt deleted file mode 100644 index d3cf5f1..0000000 --- a/doc/ubuntu_setup.txt +++ /dev/null @@ -1,37 +0,0 @@ -Steps for setting up Ubuntu in virtual machine and building FicTrac from scratch. - -1. Download and install Oracle VM VirtualBox manager. -2. Download and install latest Lubuntu 64-bit operating system image (*.iso). - https://lubuntu.net/ -3. Open VirtualBox and create new machine - a. Adjust memory, processor, and USB3 settings - b. Allocate virtual hard drive at least 8 GB -4. Launch new virtual machine - a. Machine will fail to find operating system - b. Load Lubuntu OS image as optical drive (under device menu) -5. Install and launch Lubuntu - a. Select lightweight installation (if you don't want games and office apps) - b. Allow installation of 3rd party libraries (non-free) -6. Install guest additions in Lubuntu - a. Devices -> insert guest additions cd - b. Open terminal and navigate to mounted folder - c. sudo sh VBoxLinuxAdditions.run - d. Reboot Lubuntu -6. Open terminal and install prerequisites - a. sudo apt-get install build-essential cmake cmake-gui pkg-config gedit cairomm-1.0 libjpeg-dev libpng-dev libtiff-dev libavcodec-dev libavformat-dev libswscale-dev libv4l-dev libx264-dev libatlas-base-dev libboost-system-dev libnlopt-dev libgtk-3-dev libdc1394-22-dev -7. Install latest version of OpenCV - a. Download and extract latest source package from https://opencv.org/releases.html - b. in terminal navigate to opencv folder - c. mkdir build - d. 
cd build - e. cmake-gui .. - f. configure - g. set install prefix to /usr/local - h. disable building perf tests, building tests - i. disable all modules except for core, highgui, imgcodecs, imgproc, video, videoio - j. generate and close cmake-gui - k. make -j4 - l. sudo make install - m. sudo ldconfig - n. to check opencv has been installed run: pkg-config --modversion opencv -8. Download and build FicTrac source code \ No newline at end of file diff --git a/docker/fictrac_dev.dockerfile b/docker/fictrac_dev.dockerfile deleted file mode 100644 index 8db4704..0000000 --- a/docker/fictrac_dev.dockerfile +++ /dev/null @@ -1,29 +0,0 @@ -## To build, execute from current directory: -# docker build -t fictrac_dev -f .\fictrac_dev.dockerfile . -## To enable volume sharing between host/container, execute in elevated powershell: -# Set-NetConnectionProfile -InterfaceAlias "vEthernet (DockerNAT)" -NetworkCategory Private -# (restart docker for windows) -## To launch container: -# docker run -v c:\Users\richardm:/home --name fictrac_dev -ti fictrac_dev /sbin/my_init -- bash -l -## To launch privileged container (e.g. for dmesg access) -# docker run --privileged [...] -## To exit/stop container: -# exit -## To relaunch container: -# docker start -i fictrac_dev - -FROM phusion/baseimage - -# Use baseimage-docker's init system. -CMD ["/sbin/my_init"] - -ENV DISPLAY=10.0.75.1:0 -#RUN rm -f /etc/service/sshd/down -#RUN sed -i 's/.*PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config - -# Install prerequisities -RUN apt-get update -RUN apt-get -y install make pkg-config libopencv-dev libboost-all-dev libnlopt-dev libcairomm-1.0 gedit git gitk git-gui - -# Clean up APT when done. -RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* diff --git a/exec/configGui.cpp b/exec/configGui.cpp index 91c5fcc..bc588f8 100644 --- a/exec/configGui.cpp +++ b/exec/configGui.cpp @@ -64,9 +64,6 @@ int main(int argc, char *argv[]) getchar_clean(); return -1; } - - /// Init OpenCV windows - only used by GTK backend? - cvStartWindowThread(); /// Run configuration GUI. bool ret = cfg.run(); diff --git a/exec/fictrac.cpp b/exec/fictrac.cpp index 872e6fb..81ee85e 100644 --- a/exec/fictrac.cpp +++ b/exec/fictrac.cpp @@ -11,71 +11,91 @@ #include "fictrac_version.h" #include +#include +#include -using std::string; +using namespace std; + +/// Ctrl-c handling +bool _active = true; +void ctrlcHandler(int /*signum*/) { _active = false; } int main(int argc, char *argv[]) { - PRINT("///"); - PRINT("/// FicTrac:\tA webcam-based method for generating fictive paths.\n///"); - PRINT("/// Usage:\tfictrac CONFIG_FN [-v LOG_VERBOSITY]\n///"); - PRINT("/// \tCONFIG_FN\tPath to input config file (defaults to config.txt)."); - PRINT("/// \tLOG_VERBOSITY\t[Optional] One of DBG, INF, WRN, ERR."); - PRINT("///"); - PRINT("/// Version: %2d.%02d (build date: %s)", FICTRAC_VERSION_MAJOR, FICTRAC_VERSION_MINOR, __DATE__); - PRINT("///\n"); + PRINT("///"); + PRINT("/// FicTrac:\tA webcam-based method for generating fictive paths.\n///"); + PRINT("/// Usage:\tfictrac CONFIG_FN [-v LOG_VERBOSITY]\n///"); + PRINT("/// \tCONFIG_FN\tPath to input config file (defaults to config.txt)."); + PRINT("/// \tLOG_VERBOSITY\t[Optional] One of DBG, INF, WRN, ERR."); + PRINT("///"); + PRINT("/// Version: %2d.%02d (build date: %s)", FICTRAC_VERSION_MAJOR, FICTRAC_VERSION_MINOR, __DATE__); + PRINT("///\n"); /// Parse args. 
string log_level = "info"; string config_fn = "config.txt"; - bool should_exit_when_done = false; + bool do_test = false; for (int i = 1; i < argc; ++i) { if ((string(argv[i]) == "--verbosity") || (string(argv[i]) == "-v")) { if (++i < argc) { log_level = argv[i]; } else { - LOG_ERR("-v/--verbosity requires one argument (debug < info (default) < warn < error)!"); + LOG_ERR("-v/--verbosity requires one argument (debug < info (default) < warn < error)!"); return -1; } } - else if((string(argv[i]) == "--exit_when_done") || (string(argv[i]) == "-e")) { - should_exit_when_done = true; - } + else if ((string(argv[i]) == "--test") || (string(argv[i]) == "-t")) { + do_test = true; + } else { - config_fn = argv[i]; + config_fn = argv[i]; } } - /// Set logging level. - Logger::setVerbosity(log_level); + /// Set logging level. + Logger::setVerbosity(log_level); - //// Catch cntl-c - //signal(SIGINT, TERMINATE); + // Catch cntl-c + signal(SIGINT, ctrlcHandler); /// Set high priority (when run as SU). - if (!SetProcessHighPriority()) { - LOG_ERR("Error! Unable to set process priority!"); - } else { - LOG("Set process priority to HIGH!"); - } - - Trackball tracker(config_fn); - - /// Now Trackball has spawned our worker threads, we set this thread to low priority. - SetThreadNormalPriority(); - - // wait for tracking to finish - while (tracker.isActive()) { sleep(500); } - - //tracker.printState(); - tracker.writeTemplate(); - - if(!should_exit_when_done) { - PRINT("\n\nHit ENTER to exit.."); - getchar_clean(); - } - - return 0; -} + if (!SetProcessHighPriority()) { + LOG_ERR("Error! Unable to set process priority!"); + } + else { + LOG("Set process priority to HIGH!"); + } + + unique_ptr tracker = make_unique(config_fn); + + /// Now Trackball has spawned our worker threads, we set this thread to low priority. + SetThreadNormalPriority(); + + /// Wait for tracking to finish. + while (tracker->isActive()) { + if (!_active) { + tracker->terminate(); + } + sleep(250); + } + + /// Save the eventual template to disk. + tracker->writeTemplate(); + + /// If we're running in test mode, print some stats. + if (do_test) { + tracker->dumpState(); + } + + /// Try to force release of all objects. + tracker.reset(); + + /// Wait a bit before exiting... 
+ sleep(250); + + //PRINT("\n\nHit ENTER to exit.."); + //getchar_clean(); + return 0; +} \ No newline at end of file diff --git a/include/CVSource.h b/include/CVSource.h index 77580ed..6f1f90e 100644 --- a/include/CVSource.h +++ b/include/CVSource.h @@ -28,4 +28,6 @@ class CVSource : public FrameSource { private: std::shared_ptr _cap; cv::Mat _frame_cap; + + bool _is_image; }; diff --git a/include/CmPoint.h b/include/CmPoint.h index 6aad8c3..e8ec4a9 100644 --- a/include/CmPoint.h +++ b/include/CmPoint.h @@ -22,9 +22,9 @@ class CmPointT { CmPointT() : x(0), y(0), z(0) {} CmPointT(T x_, T y_, T z_) : x(x_), y(y_), z(z_) {} - CmPointT(const CvPoint& p) : x(p.x), y(p.y), z(0) {} - CmPointT(const CvPoint2D32f& p) : x(p.x), y(p.y), z(0) {} - CmPointT(const CvPoint3D32f& p) : x(p.x), y(p.y), z(p.z) {} + CmPointT(const cv::Point& p) : x(p.x), y(p.y), z(0) {} + CmPointT(const cv::Point2f& p) : x(p.x), y(p.y), z(0) {} + CmPointT(const cv::Point3f& p) : x(p.x), y(p.y), z(p.z) {} CmPointT(const CmPoint32f& p) : x(p.x), y(p.y), z(p.z) {} CmPointT(const CmPoint64f& p) : x(p.x), y(p.y), z(p.z) {} CmPointT(T az, T el); @@ -32,7 +32,6 @@ class CmPointT { /// Allow implicit conversion of scalar to CmPointT for scaling CmPointT(T scale) : x(scale), y(scale), z(scale) {} - void copyTo(CvPoint3D32f& p) const { p = cvPoint3D32f(x,y,z); } void copyTo(cv::Point3f& p) const { p = cv::Point3f(static_cast(x), static_cast(y), static_cast(z)); } void copyTo(cv::Point3d& p) const { p = cv::Point3d(static_cast(x), static_cast(y), static_cast(z)); } void copyTo(float *p) const { p[0] = static_cast(x); p[1] = static_cast(y); p[2] = static_cast(z); } diff --git a/include/ConfigGui.h b/include/ConfigGui.h index eccee43..098c5b0 100644 --- a/include/ConfigGui.h +++ b/include/ConfigGui.h @@ -80,11 +80,11 @@ class ConfigGui private: bool setFrame(cv::Mat& frame); - bool updateC2ATransform(const cv::Mat& ref_cnrs, cv::Mat& R, cv::Mat& t); + bool updateRt(const std::string& ref_str, cv::Mat& R, cv::Mat& t); //void drawC2ATransform(cv::Mat& disp_frame, const cv::Mat& ref_cnrs, const cv::Mat& R, const cv::Mat& t, const double& r, const CmPoint& c); void drawC2AAxes(cv::Mat& disp_frame, const cv::Mat& R, const cv::Mat& t, const double& r, const CmPoint& c); - void drawC2ACorners(cv::Mat& disp_frame, const cv::Mat& ref_cnrs, const cv::Mat& R, const cv::Mat& t); - bool saveC2ATransform(const cv::Mat& R, const cv::Mat& t); + void drawC2ACorners(cv::Mat& disp_frame, const std::string& ref_str, const cv::Mat& R, const cv::Mat& t); + bool saveC2ATransform(const std::string& ref_str, const cv::Mat& R, const cv::Mat& t); void changeState(INPUT_MODE new_state); diff --git a/include/ConfigParser.h b/include/ConfigParser.h index 18930c0..57c7d86 100644 --- a/include/ConfigParser.h +++ b/include/ConfigParser.h @@ -9,6 +9,7 @@ #include #include #include +#include /// /// Config file parser. 
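The ConfigParser hunk below moves the templated `get()` accessor inline (parsing the stored string through a std::stringstream) and restricts the generic `add()` overloads to arithmetic value types. A short usage sketch follows; it assumes the parser can be constructed from a config file path (the constructor is not shown in these hunks) and the file name is hypothetical.

```cpp
// Illustrative sketch of the ConfigParser accessors declared below; behaviour is
// inferred from the header diff, and the constructor taking a file path is an
// assumption (it is not shown in these hunks).
#include "ConfigParser.h"

#include <string>
#include <vector>

int main()
{
    ConfigParser cfg("config.txt");         // hypothetical config file

    // scalar reads: get<T>() parses the stored string via std::stringstream
    double vfov = cfg.get<double>("vfov");
    std::string src = cfg("src_fn");        // operator() returns the raw string

    // add() is now restricted to arithmetic types (and vectors thereof)
    int q = 6;
    cfg.add("q_factor", q);
    std::vector<int> roi = { 10, 20, 30, 40 };
    cfg.add("roi_circ", roi);

    cfg.write();                            // write the updated map back to file
    (void)vfov; (void)src;
    return 0;
}
```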
@@ -26,10 +27,15 @@ class ConfigParser int write() { return write(_fn); } /// Quick accessor functions - std::string operator()(std::string key); + std::string operator()(std::string key) const; template - T get(std::string key); + T get(std::string key) const { + std::stringstream ss(operator()(key)); + T val; + ss >> val; + return val; + }; /// Accessor functions bool getStr(std::string key, std::string& val); @@ -41,15 +47,19 @@ class ConfigParser bool getVVecInt(std::string key, std::vector>& val); /// Write access - template - void add(std::string key, T& val) { _data[key] = std::to_string(val); } + template< + typename T, + typename = typename std::enable_if::value, T>::type + > void add(std::string key, T& val) { _data[key] = std::to_string(val); } // special case: string void add(std::string key, std::string val) { _data[key] = val; } // special case: vector - template - void add(std::string key, std::vector& val) { + template< + typename T, + typename = typename std::enable_if::value, T>::type + > void add(std::string key, std::vector& val) { std::string str = "{ "; for (auto v : val) { str += std::to_string(v) + ", "; @@ -59,8 +69,10 @@ class ConfigParser } // super special case: vector of vectors - template - void add(std::string key, std::vector>& val) { + template< + typename T, + typename = typename std::enable_if::value, T>::type + > void add(std::string key, std::vector>& val) { std::string str = "{ "; for (auto v : val) { str += "{ "; @@ -72,6 +84,11 @@ class ConfigParser str = str.substr(0, std::max(static_cast(str.size()-2),2)) + " }"; // drop last comma _data[key] = str; } + + // erase element/s + void erase(std::string key) { + _data.erase(key); + } /// Debugging void printAll(); diff --git a/include/FrameGrabber.h b/include/FrameGrabber.h index 4fb55c7..c2059e5 100644 --- a/include/FrameGrabber.h +++ b/include/FrameGrabber.h @@ -30,19 +30,20 @@ class FrameGrabber const cv::Mat& remap_mask, double thresh_ratio, double thresh_win_pc, - int max_buf_len = 10, + std::string thresh_rgb_transform = "grey", + int max_buf_len = 1, int max_frame_cnt = -1 ); ~FrameGrabber(); void terminate(); - bool getFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp, bool latest); - bool getLatestFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp) { - return getFrameSet(frame, remap, timestamp, true); + bool getFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp, double& ms_since_midnight, bool latest = true); + bool getLatestFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp, double& ms_since_midnight) { + return getFrameSet(frame, remap, timestamp, ms_since_midnight, true); } - bool getNextFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp) { - return getFrameSet(frame, remap, timestamp, false); + bool getNextFrameSet(cv::Mat& frame, cv::Mat& remap, double& timestamp, double& ms_since_midnight) { + return getFrameSet(frame, remap, timestamp, ms_since_midnight, false); } private: @@ -58,6 +59,12 @@ class FrameGrabber double _thresh_ratio; int _thresh_win, _thresh_rad; + enum { + GREY, + RED, + GREEN, + BLUE + } _thresh_rgb_transform; int _max_buf_len, _max_frame_cnt; @@ -69,5 +76,5 @@ class FrameGrabber /// Output queues. 
std::deque _frame_q, _remap_q; - std::deque _ts_q; + std::deque _ts_q, _ms_q; }; diff --git a/include/FrameSource.h b/include/FrameSource.h index 1970251..aed92b3 100644 --- a/include/FrameSource.h +++ b/include/FrameSource.h @@ -27,6 +27,7 @@ class FrameSource { int getWidth() { return _width; } int getHeight() { return _height; } double getTimestamp() { return _timestamp; } + double getMsSinceMidnight() { return _ms_since_midnight; } void setBayerType(BAYER_TYPE bayer_type) { _bayerType = bayer_type; } bool isLive() { return _live; } @@ -34,6 +35,6 @@ class FrameSource { bool _open; BAYER_TYPE _bayerType; int _width, _height; - double _timestamp, _fps; + double _timestamp, _fps, _ms_since_midnight; bool _live; }; diff --git a/include/Localiser.h b/include/Localiser.h index aedd0e8..b1f7f73 100644 --- a/include/Localiser.h +++ b/include/Localiser.h @@ -13,6 +13,7 @@ #include #include // shared_ptr +#include /// /// @@ -22,7 +23,7 @@ class Localiser : public NLoptFunc public: Localiser(nlopt_algorithm alg, double bound, double tol, int max_evals, CameraModelPtr sphere_model, const cv::Mat& sphere_map, - const cv::Mat& roi_mask, std::shared_ptr p1s_lut); + const cv::Mat& roi_mask, std::shared_ptr> p1s_lut); ~Localiser() {}; double search(cv::Mat& roi_frame, cv::Mat& R_roi, CmPoint64f& vx); @@ -36,7 +37,7 @@ class Localiser : public NLoptFunc const double* _R_roi; CameraModelPtr _sphere_model; const cv::Mat _sphere_map, _roi_mask; - std::shared_ptr _p1s_lut; + std::shared_ptr> _p1s_lut; cv::Mat _roi_frame; int _roi_w, _roi_h; }; diff --git a/include/PGRSource.h b/include/PGRSource.h index 2932839..d76311a 100644 --- a/include/PGRSource.h +++ b/include/PGRSource.h @@ -1,16 +1,22 @@ /// FicTrac http://rjdmoore.net/fictrac/ /// \file PGRSource.h -/// \brief PGR USB3 sources (Spinnaker SDK). +/// \brief PGR USB2/3 sources (FlyCapture/Spinnaker SDK). /// \author Richard Moore /// \copyright CC BY-NC-SA 3.0 -#pragma once +#if defined(PGR_USB2) || defined(PGR_USB3) -#ifdef PGR_USB3 +#pragma once #include "FrameSource.h" +#if defined(PGR_USB3) #include +#elif defined(PGR_USB2) +#include +#include +#endif // PGR_USB2/3 + #include class PGRSource : public FrameSource { @@ -24,9 +30,13 @@ class PGRSource : public FrameSource { virtual bool grab(cv::Mat& frame); private: +#if defined(PGR_USB3) Spinnaker::SystemPtr _system; Spinnaker::CameraList _camList; Spinnaker::CameraPtr _cam; +#elif defined(PGR_USB2) + std::shared_ptr _cam; +#endif // PGR_USB2/3 }; -#endif +#endif // PGR_USB2/3 diff --git a/include/RecorderInterface.h b/include/RecorderInterface.h index 0f66c98..84c0fcf 100644 --- a/include/RecorderInterface.h +++ b/include/RecorderInterface.h @@ -15,7 +15,8 @@ class RecorderInterface CLOSED, TERM, FILE, - SOCK + SOCK, + COM }; RecorderInterface() : _open(false), _type(CLOSED) {} diff --git a/include/Remapper.h b/include/Remapper.h index 7f70437..b735a31 100644 --- a/include/Remapper.h +++ b/include/Remapper.h @@ -35,7 +35,6 @@ class Remapper int getDstH() { return _dstH; } virtual void apply(const cv::Mat& src, cv::Mat& dst); - void apply(const IplImage *src, IplImage *dst); void applyC1(const unsigned char *src, unsigned char *dst, int srcStep=0, int dstStep=0); diff --git a/include/SerialRecorder.h b/include/SerialRecorder.h new file mode 100644 index 0000000..9f27fb2 --- /dev/null +++ b/include/SerialRecorder.h @@ -0,0 +1,33 @@ +/// FicTrac http://rjdmoore.net/fictrac/ +/// \file SerialRecorder.h +/// \brief Implementation of serial recorder. 
+/// \author Richard Moore +/// \copyright CC BY-NC-SA 3.0 + +#pragma once + +#include "RecorderInterface.h" + +#ifdef _WIN32 +#include +#endif +#include + +#include +#include + +class SerialRecorder : public RecorderInterface +{ +public: + SerialRecorder(); + ~SerialRecorder(); + + /// Interface to be overridden by implementations. + bool openRecord(std::string port_baud); + bool writeRecord(std::string s); + void closeRecord(); + +private: + std::string _port_name; + std::shared_ptr _port; +}; diff --git a/include/Serial_win.h b/include/Serial_win.h new file mode 100644 index 0000000..8a7a13e --- /dev/null +++ b/include/Serial_win.h @@ -0,0 +1,66 @@ +/** Serial.h + * + * A very simple serial port control class that does NOT require MFC/AFX. + * + * License: This source code can be used and/or modified without restrictions. + * It is provided as is and the author disclaims all warranties, expressed + * or implied, including, without limitation, the warranties of + * merchantability and of fitness for any purpose. The user must assume the + * entire risk of using the Software. + * + * @author Hans de Ruiter + * + * @version 0.1 -- 28 October 2008 + */ + +#ifndef __SERIAL_H__ +#define __SERIAL_H__ + +#include +#include + +typedef std::basic_string tstring; + +class Serial +{ +private: + HANDLE commHandle; + +public: + Serial(tstring &commPortName, int bitRate = 115200); + + virtual ~Serial(); + + /** Writes a NULL terminated string. + * + * @param buffer the string to send + * + * @return int the number of characters written + */ + int write(const char buffer[]); + + /** Writes a string of bytes to the serial port. + * + * @param buffer pointer to the buffer containing the bytes + * @param buffLen the number of bytes in the buffer + * + * @return int the number of bytes written + */ + int write(const char *buffer, int buffLen); + + /** Reads a string of bytes from the serial port. + * + * @param buffer pointer to the buffer to be written to + * @param buffLen the size of the buffer + * @param nullTerminate if set to true it will null terminate the string + * + * @return int the number of bytes read + */ + int read(char *buffer, int buffLen, bool nullTerminate = true); + + /** Flushes everything from the serial port's read buffer + */ + void flush(); +}; + +#endif diff --git a/include/SocketRecorder_win.h b/include/SocketRecorder_win.h new file mode 100644 index 0000000..fcda3c2 --- /dev/null +++ b/include/SocketRecorder_win.h @@ -0,0 +1,28 @@ +/// FicTrac http://rjdmoore.net/fictrac/ +/// \file SocketRecorder_win.h +/// \brief Windows implementation of socket recorder. +/// \author Richard Moore +/// \copyright CC BY-NC-SA 3.0 + +#pragma once + +#include "RecorderInterface.h" + +#include // WSADATA, SOCKET +#include + +class SocketRecorder : public RecorderInterface +{ +public: + SocketRecorder(); + ~SocketRecorder(); + + /// Interface to be overridden by implementations. + bool openRecord(std::string port); + bool writeRecord(std::string s); + void closeRecord(); + +private: + WSADATA _wsaData; + SOCKET _listenSocket, _clientSocket; +}; diff --git a/include/Trackball.h b/include/Trackball.h index cd903f6..7435222 100644 --- a/include/Trackball.h +++ b/include/Trackball.h @@ -32,18 +32,79 @@ /// class Trackball { +public: + /// Data. 
+ struct DATA { + // trackball state + unsigned int cnt, seq; + CmPoint64f dr_roi, r_roi; + cv::Mat R_roi; + CmPoint64f dr_cam, r_cam; + cv::Mat R_cam; + CmPoint64f dr_lab, r_lab; + cv::Mat R_lab; + double ts, ms; + + double velx, vely, step_mag, step_dir, intx, inty, heading, posx, posy; + + // testing + double dist, ang_dist, step_avg, step_var, evals_avg; + + // constructors + DATA() + : cnt(0), seq(0), + dr_roi(CmPoint64f(0, 0, 0)), r_roi(CmPoint64f(0, 0, 0)), + dr_cam(CmPoint64f(0, 0, 0)), r_cam(CmPoint64f(0, 0, 0)), + dr_lab(CmPoint64f(0, 0, 0)), r_lab(CmPoint64f(0, 0, 0)), + ts(-1), + velx(0), vely(0), + step_mag(0), step_dir(0), + intx(0), inty(0), + heading(0), posx(0), posy(0), + dist(0), ang_dist(0), + step_avg(0), step_var(0), + evals_avg(0) + { + R_roi = cv::Mat::eye(3, 3, CV_64F); + R_cam = cv::Mat::eye(3, 3, CV_64F); + R_lab = cv::Mat::eye(3, 3, CV_64F); + } + + DATA(const DATA &d) + : cnt(d.cnt), seq(d.seq), + dr_roi(d.dr_roi), r_roi(d.r_roi), + dr_cam(d.dr_cam), r_cam(d.r_cam), + dr_lab(d.dr_lab), r_lab(d.r_lab), + ts(d.ts), + velx(d.velx), vely(d.vely), + step_mag(d.step_mag), step_dir(d.step_dir), + intx(d.intx), inty(d.inty), + heading(d.heading), posx(d.posx), posy(d.posy), + dist(d.dist), ang_dist(d.ang_dist), + step_avg(d.step_avg), step_var(d.step_var), + evals_avg(d.evals_avg) + { + R_roi = d.R_roi.clone(); + R_cam = d.R_cam.clone(); + R_lab = d.R_lab.clone(); + } + }; + public: Trackball(std::string cfg_fn); ~Trackball(); bool isActive() { return _active; } - void printState(); + void terminate() { _kill = true; } + std::shared_ptr getState(); + void dumpState(); bool writeTemplate(std::string fn = ""); private: /// Worker function. void process(); + void resetData(); void reset(); double testRotation(const double x[3]); virtual double objective(unsigned n, const double* x, double* grad) { return testRotation(x); } @@ -55,6 +116,7 @@ class Trackball private: /// Drawing struct DrawData { + unsigned int log_frame; cv::Mat src_frame, roi_frame, sphere_view, sphere_map; CmPoint64f dr_roi; cv::Mat R_roi; @@ -85,14 +147,13 @@ class Trackball std::unique_ptr _socket; private: - ConfigParser _cfg; /// Camera models and remapping. CameraModelPtr _src_model, _roi_model, _sphere_model; RemapTransformPtr _cam_to_roi; cv::Mat _roi_to_cam_R, _cam_to_lab_R; - std::shared_ptr _p1s_lut; + std::shared_ptr> _p1s_lut; /// Arrays. int _map_w, _map_h; @@ -107,34 +168,23 @@ class Trackball /// Optimisation. std::unique_ptr _localOpt, _globalOpt; double _error_thresh, _err; - bool _global_search; + bool _do_global_search; int _max_bad_frames; int _nevals; /// Program. bool _init, _reset, _clean_map; - /// Data. - unsigned int _cnt, _seq; - CmPoint64f _dr_roi, _r_roi; - cv::Mat _R_roi; - CmPoint64f _dr_cam, _r_cam; - cv::Mat _R_cam; - CmPoint64f _dr_lab, _r_lab; - cv::Mat _R_lab; - double _ts; - - double _velx, _vely, _step_mag, _step_dir, _intx, _inty, _heading, _posx, _posy; - - // test data - double _dist, _ang_dist, _step_avg, _step_var, _evals_avg; + /// Data + DATA _data; /// Data i/o. std::string _base_fn; std::unique_ptr _frameGrabber; - std::unique_ptr _log; + bool _do_sock_output, _do_com_output; + std::unique_ptr _data_log, _data_sock, _data_com, _vid_frames; /// Thread stuff. 
- std::atomic_bool _active; + std::atomic_bool _active, _kill, _do_reset; std::unique_ptr _thread; }; diff --git a/include/drawing.h b/include/drawing.h index e18fecd..64359b1 100644 --- a/include/drawing.h +++ b/include/drawing.h @@ -30,6 +30,9 @@ void drawCursor(cv::Mat& rgb, const cv::Point2d& pt, cv::Scalar colour); /// Draw transformed axes. void drawAxes(cv::Mat& rgb, const CameraModelPtr cam_model, const cv::Mat& R, const cv::Mat& t, const cv::Scalar colour); +/// Draw animal axis. +void drawAnimalAxis(cv::Mat& rgb, const CameraModelPtr cam_model, const cv::Mat& R, const cv::Mat& t, const double r, const cv::Scalar colour); + /// Draw rect corners. void drawRectCorners(cv::Mat& rgb, const CameraModelPtr cam_model, cv::Mat& cnrs, const cv::Scalar colour); diff --git a/include/geometry.h b/include/geometry.h index a43133f..16b0952 100644 --- a/include/geometry.h +++ b/include/geometry.h @@ -115,3 +115,6 @@ bool computeRtFromSquare_YZ(const CameraModelPtr cam_model, const std::vector& cnrs, cv::Mat& R, cv::Mat& t); + +/// Compute camera-animal R+t transform. +bool computeRtFromSquare(const CameraModelPtr cam_model, const std::string ref_str, const std::vector& cnrs, cv::Mat& R, cv::Mat& t); diff --git a/include/timing.h b/include/timing.h index afada2f..929c7a9 100644 --- a/include/timing.h +++ b/include/timing.h @@ -32,6 +32,25 @@ static double ts_ms() { return duration_cast(t1.time_since_epoch()).count() / 1000.; } +/// +/// Return ms since midnight +/// +static double ms_since_midnight() { + + static std::chrono::system_clock::time_point tmidnight; + + if (tmidnight.time_since_epoch().count() == 0) { + auto texec = std::chrono::system_clock::to_time_t(_tExec); + tm* tdate = std::localtime(&texec); + tdate->tm_hour = 0; + tdate->tm_min = 0; + tdate->tm_sec = 0; + tmidnight = std::chrono::system_clock::from_time_t(std::mktime(tdate)); + } + + return std::chrono::duration_cast(std::chrono::system_clock::now() - tmidnight).count() / 1000.; +} + /// /// Return formatted date/time string for program launch. /// @@ -57,29 +76,6 @@ static std::string execTime() return s; } -/// -/// Return formatted date/time string. -/// -static std::string dateTimeString() -{ - time_t rawtime; - struct tm* timeinfo; - - time(&rawtime); - timeinfo = localtime(&rawtime); - - char tmps[16]; - sprintf(tmps, "%4d%02d%02d_%02d%02d%02d", - timeinfo->tm_year + 1900, - timeinfo->tm_mon+1, - timeinfo->tm_mday, - timeinfo->tm_hour, - timeinfo->tm_min, - timeinfo->tm_sec); - - return std::string(tmps); -} - /// /// Return formatted date string. 
 ///
diff --git a/sample/config.txt b/sample/config.txt
index 175aa69..097a5ed 100644
--- a/sample/config.txt
+++ b/sample/config.txt
@@ -1,24 +1,24 @@
-## FicTrac config file (build Oct 2 2018)
+## FicTrac config file (build Mar 21 2019)
 c2a_cnrs_xy : { 191, 171, 128, 272, 20, 212, 99, 132 }
-c2a_r : { -0.722443, 0.131317, 0.460878 }
+c2a_r : { 0.722445, -0.131314, -0.460878 }
 c2a_src : c2a_cnrs_xy
-c2a_t : { -0.674395, 0.389373, 2.889647 }
+c2a_t : { -0.674396, 0.389373, 2.889648 }
 do_display : y
 max_bad_frames : -1
-opt_bound : 0.25
+opt_bound : 0.35
 opt_do_global : n
-opt_max_err : -1.000000
+opt_max_err : -1
 opt_max_evals : 50
 opt_tol : 0.001
+out_port : -1
 q_factor : 6
-roi_c : { -0.229390, 0.099969, 0.968187 }
 roi_circ : { 63, 171, 81, 145, 106, 135, 150, 160 }
 roi_ignr : { { 96, 156, 113, 147, 106, 128, 82, 130, 81, 150 }, { 71, 213, 90, 219, 114, 218, 135, 211, 154, 196, 150, 217, 121, 228, 99, 234, 75, 225 } }
-roi_r : 0.124815
 save_debug : n
+save_raw : n
 src_fn : sample.mp4
-src_fps : -1.000000
+src_fps : -1
 thr_ratio : 1.25
 thr_win_pc : 0.25
 vfov : 45
-
+vid_codec : h264
diff --git a/scripts/serial_client.py b/scripts/serial_client.py
new file mode 100644
index 0000000..0ee23a0
--- /dev/null
+++ b/scripts/serial_client.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+
+import serial
+
+PORT = 'COM?'       # The com port to receive data
+BAUD = 115200       # Baud rate used by the com port
+TIMEOUT_S = 1
+
+# Open the connection
+with serial.Serial(PORT, BAUD, timeout=TIMEOUT_S) as com:
+
+    # Keep receiving data until FicTrac closes
+    while com.is_open:
+        # Receive one data frame
+        data = com.readline()
+        if (not data):
+            break
+
+        line = data.decode('UTF-8')
+
+        # Tokenise
+        toks = line.split(", ")
+
+        # Fixme: sometimes we read more than one line at a time,
+        # should handle that rather than just dropping extra data...
+        if ((len(toks) < 24) | (toks[0] != "FT")):
+            print('Bad read')
+            continue
+
+        # Extract FicTrac variables
+        # (see https://github.com/rjdmoore/fictrac/blob/master/doc/data_header.txt for descriptions)
+        cnt = int(toks[1])
+        dr_cam = [float(toks[2]), float(toks[3]), float(toks[4])]
+        err = float(toks[5])
+        dr_lab = [float(toks[6]), float(toks[7]), float(toks[8])]
+        r_cam = [float(toks[9]), float(toks[10]), float(toks[11])]
+        r_lab = [float(toks[12]), float(toks[13]), float(toks[14])]
+        posx = float(toks[15])
+        posy = float(toks[16])
+        heading = float(toks[17])
+        step_dir = float(toks[18])
+        step_mag = float(toks[19])
+        intx = float(toks[20])
+        inty = float(toks[21])
+        ts = float(toks[22])
+        seq = int(toks[23])
+
+        # Do something ...
+        print(cnt)
diff --git a/scripts/socket_client.py b/scripts/socket_client.py
new file mode 100644
index 0000000..f82b83f
--- /dev/null
+++ b/scripts/socket_client.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+
+import socket
+
+HOST = '127.0.0.1'  # The server's hostname or IP address
+PORT = ????
# The port used by the server + +# Open the connection (FicTrac must be waiting for socket connection) +with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.connect((HOST, PORT)) + + data = "" + + # Keep receiving data until FicTrac closes + while True: + # Receive one data frame + new_data = sock.recv(1024) + if not new_data: + break + + # Decode received data + data += new_data.decode('UTF-8') + + # Find the first frame of data + endline = data.find("\n") + line = data[:endline] # copy first frame + data = data[endline+1:] # delete first frame + + # Tokenise + toks = line.split(", ") + + # Fixme: sometimes we read more than one line at a time, + # should handle that rather than just dropping extra data... + if ((len(toks) < 24) | (toks[0] != "FT")): + print('Bad read') + continue + + # Extract FicTrac variables + # (see https://github.com/rjdmoore/fictrac/blob/master/doc/data_header.txt for descriptions) + cnt = int(toks[1]) + dr_cam = [float(toks[2]), float(toks[3]), float(toks[4])] + err = float(toks[5]) + dr_lab = [float(toks[6]), float(toks[7]), float(toks[8])] + r_cam = [float(toks[9]), float(toks[10]), float(toks[11])] + r_lab = [float(toks[12]), float(toks[13]), float(toks[14])] + posx = float(toks[15]) + posy = float(toks[16]) + heading = float(toks[17]) + step_dir = float(toks[18]) + step_mag = float(toks[19]) + intx = float(toks[20]) + inty = float(toks[21]) + ts = float(toks[22]) + seq = int(toks[23]) + + # Do something ... + print(cnt) diff --git a/src/CVSource.cpp b/src/CVSource.cpp index f2363b3..46e99db 100644 --- a/src/CVSource.cpp +++ b/src/CVSource.cpp @@ -24,12 +24,14 @@ using cv::Mat; /// Constructor. /// CVSource::CVSource(std::string input) + : _is_image(false) { LOG_DBG("Source is: %s", input.c_str()); Mat test_frame; try { // try reading input as camera id - LOG_DBG("Trying source as camera id.."); + LOG_DBG("Trying source as camera id..."); + if (input.size() > 2) { throw std::exception(); } int id = std::stoi(input); _cap = std::shared_ptr(new cv::VideoCapture(id)); if (!_cap->isOpened()) { throw 0; } @@ -42,7 +44,7 @@ CVSource::CVSource(std::string input) catch (...) { try { // then try loading as video file - LOG_DBG("Trying source as video file.."); + LOG_DBG("Trying source as video file..."); _cap = std::shared_ptr(new cv::VideoCapture(input)); if (!_cap->isOpened()) { throw 0; } *_cap >> test_frame; @@ -52,14 +54,42 @@ CVSource::CVSource(std::string input) _live = false; } catch (...) { - LOG_ERR("Could not interpret source type (%s)!", input.c_str()); - _open = false; + try { + // then try loading as an image file + LOG_DBG("Trying source as image file..."); + _frame_cap = cv::imread(input); + if (_frame_cap.empty()) { throw 0; } + LOG("Using source type: image file."); + _open = true; + _live = false; + _is_image = true; + } + catch (...) 
{ + LOG_ERR("Could not interpret source type (%s)!", input.c_str()); + _open = false; + } } } if( _open ) { - _width = static_cast(_cap->get(cv::CAP_PROP_FRAME_WIDTH)); - _height = static_cast(_cap->get(cv::CAP_PROP_FRAME_HEIGHT)); + if (_is_image) { + _width = _frame_cap.cols; + _height = _frame_cap.rows; + } + else { + _width = static_cast(_cap->get(cv::CAP_PROP_FRAME_WIDTH)); + _height = static_cast(_cap->get(cv::CAP_PROP_FRAME_HEIGHT)); + } + if (_live) { + _fps = getFPS(); // don't init fps for video files - we might want to play them back as fast as possible + + LOG("OpenCV camera source initialised (%dx%d @ %.3f fps)!", _width, _height, _fps); + } + else if (_is_image) { + LOG("OpenCV image source initialised (%dx%d)!", _width, _height); + } else { + LOG("OpenCV video source initialised (%dx%d)!", _width, _height); + } } } @@ -74,10 +104,11 @@ CVSource::~CVSource() /// double CVSource::getFPS() { - if (_open) { - _fps = _cap->get(cv::CAP_PROP_FPS); + double fps = _fps; + if (_open && _cap) { + fps = _cap->get(cv::CAP_PROP_FPS); } - return _fps; + return fps; } /// @@ -86,12 +117,16 @@ double CVSource::getFPS() bool CVSource::setFPS(double fps) { bool ret = false; - if (_open && (fps > 0)) { - _fps = fps; - if (!_cap->set(cv::CAP_PROP_FPS, _fps)) { - LOG_WRN("Warning! Failed to set device fps (attempted to set fps=%.2f).", _fps); + if (_open && _cap && (fps > 0)) { + if (!_cap->set(cv::CAP_PROP_FPS, fps)) { + LOG_WRN("Warning! Failed to set device fps (attempted to set fps=%.2f).", fps); + _fps = fps; // just set fps anyway for playback + LOG("Playback frame rate is now %.2f", _fps); + } + else { + _fps = getFPS(); + LOG("Device frame rate is now %.2f", _fps); } - else { ret = true; } } return ret; } @@ -103,7 +138,7 @@ bool CVSource::setFPS(double fps) bool CVSource::rewind() { bool ret = false; - if (_open) { + if (_open && _cap) { if (!_cap->set(cv::CAP_PROP_POS_FRAMES, 0)) { LOG_WRN("Warning! 
Failed to rewind source."); } else { ret = true; } @@ -117,12 +152,14 @@ bool CVSource::rewind() bool CVSource::grab(cv::Mat& frame) { if( !_open ) { return false; } - if( !_cap->read(_frame_cap) ) { + if( !_is_image && !_cap->read(_frame_cap) ) { LOG_ERR("Error grabbing image frame!"); return false; } - double ts = static_cast(ts_ms()); // backup, in case the device timestamp is junk + double ts = ts_ms(); // backup, in case the device timestamp is junk + _ms_since_midnight = ms_since_midnight(); _timestamp = _cap->get(cv::CAP_PROP_POS_MSEC); + LOG_DBG("Frame captured %dx%d%d @ %f (t_sys: %f ms, t_day: %f ms)", _frame_cap.cols, _frame_cap.rows, _frame_cap.channels(), _timestamp, ts, _ms_since_midnight); if (_timestamp <= 0) { _timestamp = ts; } @@ -130,20 +167,20 @@ bool CVSource::grab(cv::Mat& frame) if( _frame_cap.channels() == 1 ) { switch( _bayerType ) { case BAYER_BGGR: - cv::cvtColor(_frame_cap, frame, CV_BayerBG2BGR); + cv::cvtColor(_frame_cap, frame, cv::COLOR_BayerBG2BGR); break; case BAYER_GBRG: - cv::cvtColor(_frame_cap, frame, CV_BayerGB2BGR); + cv::cvtColor(_frame_cap, frame, cv::COLOR_BayerGB2BGR); break; case BAYER_GRBG: - cv::cvtColor(_frame_cap, frame, CV_BayerGR2BGR); + cv::cvtColor(_frame_cap, frame, cv::COLOR_BayerGR2BGR); break; case BAYER_RGGB: - cv::cvtColor(_frame_cap, frame, CV_BayerRG2BGR); + cv::cvtColor(_frame_cap, frame, cv::COLOR_BayerRG2BGR); break; case BAYER_NONE: default: - cv::cvtColor(_frame_cap, frame, CV_GRAY2BGR); + cv::cvtColor(_frame_cap, frame, cv::COLOR_GRAY2BGR); break; } } else { @@ -152,9 +189,9 @@ bool CVSource::grab(cv::Mat& frame) /// Correct average frame rate when reading from file. if (!_live && (_fps > 0)) { - static double prev_ts = ts - 25; // initially 40 Hz - static double av_fps = 40; // initially 40 Hz - static double sleep_ms = 25; + static double prev_ts = ts - (1000/_fps); + static double av_fps = _fps; // initially 40 Hz + static double sleep_ms = 1000/_fps; av_fps = 0.15 * av_fps + 0.85 * (1000 / (ts - prev_ts)); sleep_ms *= 0.25 * (av_fps / _fps) + 0.75; sleep(static_cast(round(sleep_ms))); diff --git a/src/ConfigGUI.cpp b/src/ConfigGUI.cpp index 919632e..dafeeea 100644 --- a/src/ConfigGUI.cpp +++ b/src/ConfigGUI.cpp @@ -18,9 +18,9 @@ #include "timing.h" #include "misc.h" #include "CVSource.h" -#ifdef PGR_USB3 +#if defined(PGR_USB2) || defined(PGR_USB3) #include "PGRSource.h" -#endif // PGR_USB3 +#endif // PGR_USB2/3 /// OpenCV individual includes required by gcc? 
#include @@ -141,7 +141,7 @@ void createZoomROI(Mat& zoom_roi, const Mat& frame, const Point2d& pt, int orig_ int x = frame.cols/2; if (pt.x >= 0) { x = clamp(int(pt.x - orig_dim/2 + 0.5), int(orig_dim/2), frame.cols - 1 - orig_dim); } int y = frame.rows/2; - if (pt.y >= 0) { y = clamp(int(pt.y - orig_dim/2 + 0.5), int(orig_dim/2), frame.rows - 1 - orig_dim); } + if (pt.y >= 0) { y = clamp(int(pt.y - orig_dim/2 + 0.5), 0, frame.rows - 1 - orig_dim); } Mat crop_rect = frame(cv::Rect(x, y, orig_dim, orig_dim)); cv::resize(crop_rect, zoom_roi, zoom_roi.size()); } @@ -168,8 +168,9 @@ ConfigGui::ConfigGui(string config_fn) Mat input_frame; std::shared_ptr source; if (_open) { -#ifdef PGR_USB3 +#if defined(PGR_USB2) || defined(PGR_USB3) try { + if (input_fn.size() > 2) { throw std::exception(); } // first try reading input as camera id int id = std::stoi(input_fn); source = std::make_shared(id); @@ -178,9 +179,9 @@ ConfigGui::ConfigGui(string config_fn) // then try loading as video file source = std::make_shared(input_fn); } -#else // PGR_USB3 +#else // !PGR_USB2/3 source = std::make_shared(input_fn); -#endif // PGR_USB3 +#endif // PGR_USB2/3 if (!source->isOpen()) { LOG_ERR("Error! Could not open input frame source (%s)!", input_fn.c_str()); _open = false; @@ -192,6 +193,28 @@ ConfigGui::ConfigGui(string config_fn) } } + /// Optionally enhance frame for config + bool do_enhance = false; + _cfg.getBool("enh_cfg_disp", do_enhance); + if (_open && do_enhance) { + LOG("Enhancing config image .."); + Mat maximg = input_frame.clone(); + Mat minimg = input_frame.clone(); + while (source->grab(input_frame)) { + for (int i = 0; i < input_frame.rows; i++) { + uint8_t* pmin = minimg.ptr(i); + uint8_t* pmax = maximg.ptr(i); + const uint8_t* pimg = input_frame.ptr(i); + for (int j = 0; j < input_frame.cols * input_frame.channels(); j++) { + uint8_t p = pimg[j]; + if (p > pmax[j]) { pmax[j] = p; } + if (p < pmin[j]) { pmin[j] = p; } + } + } + } + input_frame = maximg - minimg; + } + /// Create base file name for output files. _base_fn = _cfg("output_fn"); if (_base_fn.empty()) { @@ -225,7 +248,7 @@ bool ConfigGui::setFrame(Mat& frame) _frame = frame.clone(); } else if (frame.channels() == 1) { //_frame = frame.clone(); - cv::cvtColor(frame, _frame, CV_GRAY2BGR); + cv::cvtColor(frame, _frame, cv::COLOR_GRAY2BGR); } else { // uh oh, shouldn't get here LOG_ERR("Unexpected number of image channels (%d)!", frame.channels()); @@ -247,34 +270,25 @@ bool ConfigGui::setFrame(Mat& frame) } LOG("Using vfov: %f deg", vfov); - - //FIXME: support also fisheye models! - _cam_model = CameraModel::createRectilinear(static_cast(_w), static_cast(_h), vfov * CM_D2R); + + bool fisheye = false; + if (_cfg.getBool("fisheye", fisheye) && fisheye) { + _cam_model = CameraModel::createFisheye(_w, _h, vfov * CM_D2R / (double)_h, 360 * CM_D2R); + } + else { + // default to rectilinear + _cam_model = CameraModel::createRectilinear(_w, _h, vfov * CM_D2R); + } return true; } /// /// Write camera-animal transform to config file. +/// Warning: input R+t is animal to camera frame transform! 
/// -bool ConfigGui::saveC2ATransform(const Mat& R, const Mat& t) +bool ConfigGui::saveC2ATransform(const string& ref_str, const Mat& R, const Mat& t) { - string sqr_type = ""; - switch (_input_data.mode) { - case R_XY: - sqr_type = "c2a_cnrs_xy"; - break; - case R_YZ: - sqr_type = "c2a_cnrs_yz"; - break; - case R_XZ: - sqr_type = "c2a_cnrs_xz"; - break; - default: - LOG_ERR("Uh oh, something went wrong :-("); - return false; - } - // dump corner points to config file vector cfg_pts; for (auto p : _input_data.sqrPts) { @@ -283,13 +297,13 @@ bool ConfigGui::saveC2ATransform(const Mat& R, const Mat& t) } // write to config file - LOG("Adding c2a_src and %s to config file and writing to disk (%s) ..", sqr_type.c_str(), _config_fn.c_str()); - _cfg.add("c2a_src", sqr_type); - _cfg.add(sqr_type, cfg_pts); + LOG("Adding c2a_src and %s to config file and writing to disk (%s) ..", ref_str.c_str(), _config_fn.c_str()); + _cfg.add("c2a_src", ref_str); + _cfg.add(ref_str, cfg_pts); // dump R to config file vector cfg_r, cfg_t; - CmPoint angleAxis = CmPoint64f::matrixToOmega(R); + CmPoint angleAxis = CmPoint64f::matrixToOmega(R.t()); // transpose to get camera-animal transform for (int i = 0; i < 3; i++) { cfg_r.push_back(angleAxis[i]); cfg_t.push_back(t.at(i, 0)); @@ -320,21 +334,25 @@ bool ConfigGui::saveC2ATransform(const Mat& R, const Mat& t) /// /// Update animal coordinate frame estimate. /// -bool ConfigGui::updateC2ATransform(const Mat& ref_cnrs, Mat& R, Mat& t) +bool ConfigGui::updateRt(const string& ref_str, Mat& R, Mat& t) { - bool ret = false; - if (_input_data.newEvent) { - //FIXME: also support edge clicks! e.g.: - // double x1 = click[2 * i + 0].x; double y1 = click[2 * i + 0].y; - // double x2 = click[2 * i + 1].x; double y2 = click[2 * i + 1].y; - // double x3 = click[2 * i + 2].x; double y3 = click[2 * i + 2].y; - // double x4 = click[2 * i + 3].x; double y4 = click[2 * i + 3].y; - // double px = ((x1*y2 - y1 * x2)*(x3 - x4) - (x1 - x2)*(x3*y4 - y3 * x4)) / ((x1 - x2)*(y3 - y4) - (y1 - y2)*(x3 - x4)); - // double py = ((x1*y2 - y1 * x2)*(y3 - y4) - (y1 - y2)*(x3*y4 - y3 * x4)) / ((x1 - x2)*(y3 - y4) - (y1 - y2)*(x3 - x4)); - ret = computeRtFromSquare(_cam_model, ref_cnrs, _input_data.sqrPts, R, t); - _input_data.newEvent = false; - } - return ret; + //FIXME: also support edge clicks! 
e.g.: + // double x1 = click[2 * i + 0].x; double y1 = click[2 * i + 0].y; + // double x2 = click[2 * i + 1].x; double y2 = click[2 * i + 1].y; + // double x3 = click[2 * i + 2].x; double y3 = click[2 * i + 2].y; + // double x4 = click[2 * i + 3].x; double y4 = click[2 * i + 3].y; + // double px = ((x1*y2 - y1 * x2)*(x3 - x4) - (x1 - x2)*(x3*y4 - y3 * x4)) / ((x1 - x2)*(y3 - y4) - (y1 - y2)*(x3 - x4)); + // double py = ((x1*y2 - y1 * x2)*(y3 - y4) - (y1 - y2)*(x3*y4 - y3 * x4)) / ((x1 - x2)*(y3 - y4) - (y1 - y2)*(x3 - x4)); + + bool ret = false; + if (ref_str == "c2a_cnrs_xy") { + ret = computeRtFromSquare(_cam_model, XY_CNRS, _input_data.sqrPts, R, t); + } else if (ref_str == "c2a_cnrs_yz") { + ret = computeRtFromSquare(_cam_model, YZ_CNRS, _input_data.sqrPts, R, t); + } else if (ref_str == "c2a_cnrs_xz") { + ret = computeRtFromSquare(_cam_model, XZ_CNRS, _input_data.sqrPts, R, t); + } + return ret; } ///// @@ -363,12 +381,23 @@ bool ConfigGui::updateC2ATransform(const Mat& ref_cnrs, Mat& R, Mat& t) /// /// /// -void ConfigGui::drawC2ACorners(Mat& disp_frame, const Mat& ref_cnrs, const Mat& R, const Mat& t) +void ConfigGui::drawC2ACorners(Mat& disp_frame, const string& ref_str, const Mat& R, const Mat& t) { // make x4 mat for projecting corners Mat T(3, 4, CV_64F); for (int i = 0; i < 4; i++) { t.copyTo(T.col(i)); } + Mat ref_cnrs; + if (ref_str == "c2a_cnrs_xy") { + ref_cnrs = XY_CNRS; + } else if (ref_str == "c2a_cnrs_yz") { + ref_cnrs = YZ_CNRS; + } else if (ref_str == "c2a_cnrs_xz") { + ref_cnrs = XZ_CNRS; + } else { + return; + } + // project reference corners Mat p = R * ref_cnrs + T; @@ -386,6 +415,7 @@ void ConfigGui::drawC2AAxes(Mat& disp_frame, const Mat& R, const Mat& t, const d double scale = 1.0 / tan(r); Mat so = (cv::Mat_(3, 1) << c.x, c.y, c.z) * scale; drawAxes(disp_frame, _cam_model, R, so, Scalar(0, 0, 255)); + drawAnimalAxis(disp_frame, _cam_model, R, so, r, Scalar(255, 0, 0)); } } @@ -407,6 +437,10 @@ bool ConfigGui::run() /// Interactive window. cv::namedWindow("configGUI", cv::WINDOW_AUTOSIZE); cv::setMouseCallback("configGUI", onMouseEvent, &_input_data); + + /// If reconfiguring, then delete pre-computed values. + bool reconfig = false; + _cfg.getBool("reconfig", reconfig); /// Display/input loop. Mat R, t; @@ -414,24 +448,25 @@ bool ConfigGui::run() double r = -1; char key = 0; string val; - string cfg_r_src; + string c2a_src; vector cfg_pts; vector cfg_vec; vector> cfg_polys; changeState(CIRC_INIT); - const char exit_key = 0x1b; -#ifdef WIN32 - const char enter_key = 0x0d; -#else // WIN32 - const char enter_key = 0x0a; -#endif // WIN32 const int click_rad = std::max(int(_w/150+0.5), 5); Mat disp_frame, zoom_frame(ZOOM_DIM, ZOOM_DIM, CV_8UC3); const int scaled_zoom_dim = static_cast(ZOOM_DIM * ZOOM_SCL + 0.5); - while (_open && (key != exit_key)) { + while (_open && (key != 0x1b)) { // esc /// Create frame for drawing. //cv::cvtColor(_frame, disp_frame, CV_GRAY2RGB); disp_frame = _frame.clone(); + + // normalise zoom window + { + double min, max; + cv::minMaxLoc(disp_frame, &min, &max); + disp_frame = (disp_frame - min) * 255 / (max - min); + } int in; string str; @@ -442,10 +477,10 @@ bool ConfigGui::run() // test read cfg_pts.clear(); - if (_cfg.getVecDbl("roi_c", cfg_vec) && _cfg.getDbl("roi_r", r)) { + if (!reconfig && _cfg.getVecDbl("roi_c", cfg_vec) && _cfg.getDbl("roi_r", r)) { c.copy(cfg_vec.data()); LOG_DBG("Found roi_c = [%f %f %f] and roi_r = %f rad.", c[0], c[1], c[2], r); - LOG_WRN("Warning! 
When roi_c and roi_r are specified in the config file, roi_circ will be ignored.\nTo re-compute roi_c and roi_r, please delete these values from the config file and reconfigure."); + LOG_WRN("Warning! When roi_c and roi_r are specified in the config file, roi_circ will be ignored.\nTo re-compute roi_c and roi_r, please delete these values or set reconfig : y in the config file and reconfigure."); } else if (_cfg.getVecInt("roi_circ", cfg_pts)) { @@ -457,9 +492,25 @@ bool ConfigGui::run() /// Fit circular FoV to sphere. if (_input_data.circPts.size() >= 3) { - circleFit_camModel(_input_data.circPts, _cam_model, c, r); + if (circleFit_camModel(_input_data.circPts, _cam_model, c, r)) { + + LOG_DBG("Computed roi_c = [%f %f %f] and roi_r = %f rad from %d roi_circ points.", c[0], c[1], c[2], r, _input_data.circPts.size()); + + // save re-computed values + cfg_vec.clear(); + cfg_vec.push_back(c[0]); + cfg_vec.push_back(c[1]); + cfg_vec.push_back(c[2]); - LOG_DBG("Computed roi_c = [%f %f %f] and roi_r = %f rad from %d roi_circ points.", c[0], c[1], c[2], r, _input_data.circPts.size()); + // write to config file + LOG("Adding roi_c and roi_r to config file and writing to disk (%s) ..", _config_fn.c_str()); + _cfg.add("roi_c", cfg_vec); + _cfg.add("roi_r", r); + if (_cfg.write() <= 0) { + LOG_ERR("Error writing to config file (%s)!", _config_fn.c_str()); + _open = false; // will cause exit + } + } } } else { @@ -470,7 +521,7 @@ bool ConfigGui::run() /// Draw fitted circumference. if (r > 0) { drawCircle_camModel(disp_frame, _cam_model, c, r, Scalar(255,0,0), false); - + /// Display. cv::imshow("configGUI", disp_frame); cv::waitKey(100); //FIXME: why do we have to wait so long to make sure the frame is drawn? @@ -480,7 +531,7 @@ bool ConfigGui::run() // input loop while (true) { cv::waitKey(100); //FIXME: dirty hack - sometimes image doesn't draw, at least with this line we can just mash keys until it does - printf("\n Would you like to keep the existing spherer ROI configuration ([y]/n)? "); + printf("\n Would you like to keep the existing sphere ROI configuration ([y]/n)? "); in = getchar(); switch (in) { @@ -527,7 +578,7 @@ bool ConfigGui::run() /// Draw previous clicks. for (auto click : _input_data.circPts) { - cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, CV_AA); + cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, cv::LINE_AA); } /// Draw fitted circumference. @@ -545,7 +596,7 @@ bool ConfigGui::run() key = cv::waitKey(5); /// State machine logic. 
- if (key == enter_key) { + if ((key == 0x0d) || (key == 0x0a)) { // return if (_input_data.circPts.size() >= 3) { // dump circumference points, c, and r to config file cfg_pts.clear(); @@ -604,9 +655,9 @@ bool ConfigGui::run() for (unsigned int i = 0; i < _input_data.ignrPts.size(); i++) { for (unsigned int j = 0; j < _input_data.ignrPts[i].size(); j++) { if (i == _input_data.ignrPts.size()-1) { - cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, CV_AA); + cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } - cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j+1)%_input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, CV_AA); + cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j+1)%_input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } } @@ -657,9 +708,9 @@ bool ConfigGui::run() for (unsigned int i = 0; i < _input_data.ignrPts.size(); i++) { for (unsigned int j = 0; j < _input_data.ignrPts[i].size(); j++) { if (i == _input_data.ignrPts.size()-1) { - cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, CV_AA); + cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } - cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j+1)%_input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, CV_AA); + cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j+1)%_input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } } @@ -678,7 +729,7 @@ bool ConfigGui::run() key = cv::waitKey(5); /// State machine logic. - if (key == enter_key) { + if ((key == 0x0d) || (key == 0x0a)) { // return // if current poly is empty, assume we've finished if (_input_data.ignrPts.empty() || _input_data.ignrPts.back().empty()) { if (!_input_data.ignrPts.empty()) { _input_data.ignrPts.pop_back(); } @@ -720,61 +771,70 @@ bool ConfigGui::run() /// Choose method for defining animal frame. case R_INIT: + /// Check if corners specified (optional). + _input_data.sqrPts.clear(); + if (_cfg.getStr("c2a_src", c2a_src)) { + LOG_DBG("Found c2a_src: %s", c2a_src.c_str()); + + /// Load square corners from config file. + cfg_pts.clear(); + if (_cfg.getVecInt(c2a_src, cfg_pts)) { + for (unsigned int i = 1; i < cfg_pts.size(); i += 2) { + _input_data.sqrPts.push_back(cv::Point2d(cfg_pts[i - 1], cfg_pts[i])); + } + } + } + /// Load R+t transform from config file. + R.release(); // clear mat cfg_vec.clear(); - if (_cfg.getVecDbl("c2a_r", cfg_vec)) { + if (!reconfig && _cfg.getVecDbl("c2a_r", cfg_vec)) { LOG_DBG("Read c2a_r = [%f %f %f]", cfg_vec[0], cfg_vec[1], cfg_vec[2]); - R = CmPoint64f::omegaToMatrix(CmPoint(cfg_vec[0], cfg_vec[1], cfg_vec[2])); + R = CmPoint64f::omegaToMatrix(CmPoint(cfg_vec[0], cfg_vec[1], cfg_vec[2])).t(); // transpose to lab-camera transform } else { - LOG_DBG("Error reading c2a_r from config file! Re-running configuration .."); - changeState(R_SLCT); - break; + LOG_WRN("Warning! c2a_r missing from config file. Looking for corner points.."); } + t.release(); // clear mat cfg_vec.clear(); - if (_cfg.getVecDbl("c2a_t", cfg_vec)) { + if (!reconfig && _cfg.getVecDbl("c2a_t", cfg_vec)) { LOG_DBG("Read c2a_t = [%f %f %f]", cfg_vec[0], cfg_vec[1], cfg_vec[2]); t = (cv::Mat_(3, 1) << cfg_vec[0], cfg_vec[1], cfg_vec[2]); } else { - LOG_DBG("Error reading c2a_t from config file! Re-running configuration .."); - changeState(R_SLCT); - break; + LOG_WRN("Warning! 
c2a_t missing from config file. Looking for corner points.."); } - LOG_WRN("Warning! When c2a_r and c2a_t are specified in the config file, c2a_src and associated corners points will be ignored.\nTo re-compute c2a_r and c2a_t, please delete these values from the config file and reconfigure."); + if (R.empty() || t.empty()) { + if (!_input_data.sqrPts.empty()) { + LOG_DBG("Recomputing R+t from specified corner points..."); - /// Check also if corners specified (unnecessary). - if (_cfg.getStr("c2a_src", cfg_r_src)) { - LOG_DBG("Found c2a_src: %s", cfg_r_src.c_str()); - - /// Load square corners from config file. - cfg_pts.clear(); - if (_cfg.getVecInt(cfg_r_src, cfg_pts)) { - _input_data.sqrPts.clear(); - for (unsigned int i = 1; i < cfg_pts.size(); i += 2) { - _input_data.sqrPts.push_back(cv::Point2d(cfg_pts[i - 1], cfg_pts[i])); + /// Recompute R+t + if (updateRt(c2a_src, R, t)) { + saveC2ATransform(c2a_src, R, t); } + } + } + else { + LOG_WRN("Warning! When c2a_r and c2a_t are specified in the config file, c2a_src and associated corners points will be ignored.\nTo re-compute c2a_r and c2a_t, please delete these values or set reconfig : y in the config file and reconfigure."); + } - /// Draw previous clicks. - for (auto click : _input_data.sqrPts) { - cv::circle(disp_frame, click, click_rad, Scalar(255, 255, 0), 1, CV_AA); - } + /// If c2a_r/t missing and couldn't re-compute from specified corners points. + if (R.empty() || t.empty()) { + LOG_ERR("Error! Could not read or compute c2a_r and/or c2a_t. Re-running configuration.."); + changeState(R_SLCT); + break; + } - /// Draw reference corners. - if (cfg_r_src == "c2a_cnrs_xy") { - drawC2ACorners(disp_frame, XY_CNRS, R, t); - } - else if (cfg_r_src == "c2a_cnrs_yz") { - drawC2ACorners(disp_frame, YZ_CNRS, R, t); - } - else if (cfg_r_src == "c2a_cnrs_xz") { - drawC2ACorners(disp_frame, XZ_CNRS, R, t); - } - } + /// Draw previous clicks. + for (auto click : _input_data.sqrPts) { + cv::circle(disp_frame, click, click_rad, Scalar(255, 255, 0), 1, cv::LINE_AA); } + /// Draw reference corners. + drawC2ACorners(disp_frame, c2a_src, R, t); + /// Draw axes. drawC2AAxes(disp_frame, R, t, r, c); @@ -846,18 +906,21 @@ bool ConfigGui::run() { case 1: printf("\n\n\n XY-square method.\n\n Please click on the four corners of a square shape that is aligned with the animal's X-Y axes. The corners must be clicked in the following order: (+X,-Y), (+X,+Y), (-X,+Y), (-X,-Y). If your camera is looking down on the animal from above, then the four corners are (in order): TL, TR, BR, BL from the camera's perspective. If your camera is below the animal, then the order is TR, TL, BL, BR.\n\n Make sure the displayed axis is the correct right-handed coordinate frame!!\n\n You can hold F to mirror the axis if the handedness is incorrect.\n\n Press ENTER when you are satisfied with the animal's axis, or press ESC to exit..\n\n"); + c2a_src = "c2a_cnrs_xy"; // advance state changeState(R_XY); break; case 2: printf("\n\n\n YZ-square method.\n\n Please click on the four corners of a square shape that is aligned with the animal's Y-Z axes. The corners must be clicked in the following order: (-Y,-Z), (+Y,-Z), (+Y,+Z), (-Y,+Z). If your camera is behind the animal, then the four corners are (in order): TL, TR, BR, BL from the camera's perspective. 
If your camera is in front of the animal, then the order is TR, TL, BL, BR.\n\n Make sure the displayed axis is the correct right-handed coordinate frame!!\n\n You can hold F to mirror the axis if the handedness is incorrect.\n\n Press ENTER when you are satisfied with the animal's axis, or press ESC to exit..\n\n"); + c2a_src = "c2a_cnrs_yz"; // advance state changeState(R_YZ); break; case 3: printf("\n\n\n XZ-square method.\n\n Please click on the four corners of a square shape that is aligned with the animal's X-Z axes. The corners must be clicked in the following order: (+X,-Z), (-X,-Z), (-X,+Z), (+X,+Z). If your camera is to the animal's left side, then the four corners are (in order): TL, TR, BR, BL from the camera's perspective. If your camera is to the animal's right side, then the order is TR, TL, BL, BR.\n\n Make sure the displayed axis is the correct right-handed coordinate frame!!\n\n You can hold F to mirror the axis if the handedness is incorrect.\n\n Press ENTER when you are satisfied with the animal's axis, or press ESC to exit..\n\n"); + c2a_src = "c2a_cnrs_xz"; // advance state changeState(R_XZ); break; @@ -869,6 +932,7 @@ bool ConfigGui::run() // break; case 5: + c2a_src = "ext"; // advance state changeState(R_EXT); break; @@ -887,13 +951,16 @@ bool ConfigGui::run() /// Draw previous clicks. for (auto click : _input_data.sqrPts) { - cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, CV_AA); + cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, cv::LINE_AA); } /// Draw axes. if (_input_data.sqrPts.size() == 4) { - updateC2ATransform(XY_CNRS, R, t); - drawC2ACorners(disp_frame, XY_CNRS, R, t); + if (_input_data.newEvent) { + updateRt(c2a_src, R, t); + _input_data.newEvent = false; + } + drawC2ACorners(disp_frame, c2a_src, R, t); drawC2AAxes(disp_frame, R, t, r, c); } @@ -909,10 +976,10 @@ bool ConfigGui::run() key = cv::waitKey(5); /// State machine logic. - if (key == enter_key) { + if ((key == 0x0d) || (key == 0x0a)) { // return if ((_input_data.sqrPts.size() == 4) && !R.empty()) { // dump corner points to config file - if (!saveC2ATransform(R, t)) { + if (!saveC2ATransform(c2a_src, R, t)) { LOG_ERR("Error writing coordinate transform to config file!"); _open = false; // will cause exit } @@ -937,13 +1004,16 @@ bool ConfigGui::run() /// Draw previous clicks. for (auto click : _input_data.sqrPts) { - cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, CV_AA); + cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, cv::LINE_AA); } /// Draw axes. if (_input_data.sqrPts.size() == 4) { - updateC2ATransform(YZ_CNRS, R, t); - drawC2ACorners(disp_frame, YZ_CNRS, R, t); + if (_input_data.newEvent) { + updateRt(c2a_src, R, t); + _input_data.newEvent = false; + } + drawC2ACorners(disp_frame, c2a_src, R, t); drawC2AAxes(disp_frame, R, t, r, c); } @@ -959,10 +1029,10 @@ bool ConfigGui::run() key = cv::waitKey(5); /// State machine logic. - if (key == enter_key) { + if ((key == 0x0d) || (key == 0x0a)) { // return if ((_input_data.sqrPts.size() == 4) && !R.empty()) { // dump corner points to config file - if (!saveC2ATransform(R, t)) { + if (!saveC2ATransform(c2a_src, R, t)) { LOG_ERR("Error writing coordinate transform to config file!"); _open = false; // will cause exit } @@ -987,13 +1057,16 @@ bool ConfigGui::run() /// Draw previous clicks. 
for (auto click : _input_data.sqrPts) { - cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, CV_AA); + cv::circle(disp_frame, click, click_rad, Scalar(255,255,0), 1, cv::LINE_AA); } /// Draw axes. if (_input_data.sqrPts.size() == 4) { - updateC2ATransform(XZ_CNRS, R, t); - drawC2ACorners(disp_frame, XZ_CNRS, R, t); + if (_input_data.newEvent) { + updateRt(c2a_src, R, t); + _input_data.newEvent = false; + } + drawC2ACorners(disp_frame, c2a_src, R, t); drawC2AAxes(disp_frame, R, t, r, c); } @@ -1009,10 +1082,10 @@ bool ConfigGui::run() key = cv::waitKey(5); /// State machine logic. - if (key == enter_key) { + if ((key == 0x0d) || (key == 0x0a)) { // return if ((_input_data.sqrPts.size() == 4) && !R.empty()) { // dump corner points to config file - if (!saveC2ATransform(R, t)) { + if (!saveC2ATransform(c2a_src, R, t)) { LOG_ERR("Error writing coordinate transform to config file!"); _open = false; // will cause exit } @@ -1080,7 +1153,7 @@ bool ConfigGui::run() /// Exit config. case EXIT: - key = exit_key; + key = 0x1b; // esc break; } } @@ -1100,21 +1173,15 @@ bool ConfigGui::run() for (unsigned int i = 0; i < _input_data.ignrPts.size(); i++) { for (unsigned int j = 0; j < _input_data.ignrPts[i].size(); j++) { if (i == _input_data.ignrPts.size() - 1) { - cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, CV_AA); + cv::circle(disp_frame, _input_data.ignrPts[i][j], click_rad, COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } - cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j + 1) % _input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, CV_AA); + cv::line(disp_frame, _input_data.ignrPts[i][j], _input_data.ignrPts[i][(j + 1) % _input_data.ignrPts[i].size()], COLOURS[i%NCOLOURS], 1, cv::LINE_AA); } } // draw animal coordinate frame if (_input_data.sqrPts.size() == 4) { - if (cfg_r_src == "c2a_cnrs_xy") { - drawC2ACorners(disp_frame, XY_CNRS, R, t); - } else if (cfg_r_src == "c2a_cnrs_yz") { - drawC2ACorners(disp_frame, YZ_CNRS, R, t); - } else if (cfg_r_src == "c2a_cnrs_xz") { - drawC2ACorners(disp_frame, XZ_CNRS, R, t); - } + drawC2ACorners(disp_frame, c2a_src, R, t); } drawC2AAxes(disp_frame, R, t, r, c); diff --git a/src/ConfigParser.cpp b/src/ConfigParser.cpp index f7902ea..08ac665 100644 --- a/src/ConfigParser.cpp +++ b/src/ConfigParser.cpp @@ -11,7 +11,6 @@ #include #include #include -#include #include // try, catch #include // erase, remove @@ -71,20 +70,20 @@ int ConfigParser::read(string fn) } /// Tokenise - const string whitespace = ", \t\n\r"; + const string whitespace = ", \t\n"; std::size_t delim = line.find(":"); if (delim >= line.size()) { continue; } // skip blank lines string key = line.substr(0, line.find_last_not_of(whitespace, delim - 1) + 1), val = ""; try { val = line.substr(line.find_first_not_of(whitespace, delim + 1)); - //val.erase(std::remove(val.begin(), val.end(), '\r'), val.end()); // remove /r under linux + val.erase(std::remove(val.begin(), val.end(), '\r'), val.end()); // remove /r under linux } catch (...) 
{} // add blank values /// Add to map _data[key] = val; - LOG_DBG("Extracted key: %s val: %s", key.c_str(), val.c_str()); + LOG_DBG("Extracted key: |%s| val: |%s|", key.c_str(), val.c_str()); } /// Clean up @@ -124,9 +123,11 @@ int ConfigParser::write(string fn) } /// Write comments - f << std::endl; - for (auto c : _comments) { - f << c << std::endl; + if (_comments.size() > 0) { + f << std::endl; + for (auto c : _comments) { + f << c << std::endl; + } } /// Clean up @@ -141,29 +142,15 @@ int ConfigParser::write(string fn) /// /// /// -string ConfigParser::operator()(string key) -{ - string s = ""; - getStr(key, s); - return s; -} - -/// -/// -/// -template -T ConfigParser::get(string key) +string ConfigParser::operator()(string key) const { - T val; - string s; - if (getStr(key, s)) { - std::stringstream ss(s); - try { ss >> val; } - catch (std::exception& e) { - LOG_ERR("Error parsing config file value (%s : %s)! Error was: %s", key.c_str(), ss.str().c_str(), e.what()); - } + try { + return _data.at(key); + } + catch (...) { + LOG_DBG("Key (%s) not found.", key.c_str()); } - return val; + return ""; } /// @@ -175,7 +162,7 @@ bool ConfigParser::getStr(string key, string& val) { val = _data[key]; return true; } - LOG_WRN("Warning! Key (%s) not found.", key.c_str()); + LOG_DBG("Key (%s) not found.", key.c_str()); return false; } diff --git a/src/FrameGrabber.cpp b/src/FrameGrabber.cpp index 86c4eeb..e4671e1 100644 --- a/src/FrameGrabber.cpp +++ b/src/FrameGrabber.cpp @@ -18,11 +18,10 @@ #include #include // round +#include using cv::Mat; -using std::shared_ptr; -using std::unique_lock; -using std::mutex; +using namespace std; /// /// @@ -32,6 +31,7 @@ FrameGrabber::FrameGrabber( shared_ptr source, const Mat& remap_mask, double thresh_ratio, double thresh_win_pc, + string thresh_rgb_transform, int max_buf_len, int max_frame_cnt ) : _source(source), _remapper(remapper), _remap_mask(remap_mask), _active(false) @@ -43,12 +43,22 @@ FrameGrabber::FrameGrabber( shared_ptr source, _rh = _remapper->getDstH(); /// Thresholding. - if (thresh_ratio < 0) { + if (thresh_ratio <= 0) { LOG_WRN("Invalid thresh_ratio parameter (%f)! Defaulting to 1.0", thresh_ratio); thresh_ratio = 1.0; } _thresh_ratio = thresh_ratio; + if ((thresh_rgb_transform == "red") || (thresh_rgb_transform == "r")) { + _thresh_rgb_transform = FrameGrabber::RED; + } else if ((thresh_rgb_transform == "green") || (thresh_rgb_transform == "g")) { + _thresh_rgb_transform = FrameGrabber::GREEN; + } else if ((thresh_rgb_transform == "blue") || (thresh_rgb_transform == "b")) { + _thresh_rgb_transform = FrameGrabber::BLUE; + } else { + _thresh_rgb_transform = FrameGrabber::GREY; + } + if ((thresh_win_pc < 0) || (thresh_win_pc > 1.0)) { LOG_WRN("Invalid thresh_win parameter (%f)! Defaulting to 0.2", thresh_win_pc); thresh_win_pc = 0.2; @@ -63,7 +73,7 @@ FrameGrabber::FrameGrabber( shared_ptr source, /// Thread stuff. 
_active = true; - _thread = std::unique_ptr(new std::thread(&FrameGrabber::process, this)); + _thread = std::make_unique(&FrameGrabber::process, this); } /// @@ -71,7 +81,7 @@ FrameGrabber::FrameGrabber( shared_ptr source, /// FrameGrabber::~FrameGrabber() { - LOG("Closing input stream.."); + LOG("Closing input stream"); unique_lock l(_qMutex); _active = false; @@ -86,7 +96,7 @@ FrameGrabber::~FrameGrabber() /// /// /// -bool FrameGrabber::getFrameSet(Mat& frame, Mat& remap, double& timestamp, bool latest=true) +bool FrameGrabber::getFrameSet(Mat& frame, Mat& remap, double& timestamp, double& ms_since_midnight, bool latest) { unique_lock l(_qMutex); while (_active && (_frame_q.size() == 0)) { @@ -102,14 +112,17 @@ bool FrameGrabber::getFrameSet(Mat& frame, Mat& remap, double& timestamp, bool l // mutex unlocked in unique_lock dstr return false; } + + // must be frame_q.size() > 0 to get here (can be !_active) - if ((n != _remap_q.size()) || (n != _ts_q.size())) { + if ((n != _remap_q.size()) || (n != _ts_q.size()) || (n != _ms_q.size())) { LOG_ERR("Error! Input processed frame queues are misaligned!"); // drop all frames _frame_q.clear(); _remap_q.clear(); _ts_q.clear(); + _ms_q.clear(); // wake processing thread _qCond.notify_all(); @@ -122,6 +135,7 @@ bool FrameGrabber::getFrameSet(Mat& frame, Mat& remap, double& timestamp, bool l frame = _frame_q.back(); remap = _remap_q.back(); timestamp = _ts_q.back(); + ms_since_midnight = _ms_q.back(); if (n > 1) { LOG_WRN("Warning! Dropping %d frame/s from input processed frame queues!", n - 1); @@ -131,15 +145,18 @@ bool FrameGrabber::getFrameSet(Mat& frame, Mat& remap, double& timestamp, bool l _frame_q.clear(); _remap_q.clear(); _ts_q.clear(); + _ms_q.clear(); } else { frame = _frame_q.front(); remap = _remap_q.front(); timestamp = _ts_q.front(); + ms_since_midnight = _ms_q.front(); _frame_q.pop_front(); _remap_q.pop_front(); _ts_q.pop_front(); + _ms_q.pop_front(); if (n > 1) { LOG_DBG("%d frames remaining in processed frame queue.", _frame_q.size()); @@ -180,8 +197,8 @@ void FrameGrabber::process() Mat thresh_max(_rh, _rw, CV_8UC1); thresh_max.setTo(cv::Scalar::all(0)); - std::unique_ptr win_max_hist = std::unique_ptr(new uint8_t[_thresh_win]); - std::unique_ptr win_min_hist = std::unique_ptr(new uint8_t[_thresh_win]); + auto win_max_hist = std::make_unique(_thresh_win); + auto win_min_hist = std::make_unique(_thresh_win); /// Rewind to video start. _source->rewind(); @@ -192,7 +209,7 @@ void FrameGrabber::process() if (!SetThreadVeryHighPriority()) { LOG_ERR("Error! Unable to set thread priority!"); } else { - LOG("Set frame grabbing thread priority to HIGH!"); + LOG_DBG("Set frame grabbing thread priority to HIGH!"); } /// Frame grab loop. @@ -200,7 +217,7 @@ void FrameGrabber::process() while (_active) { /// Wait until we need to capture a new frame. unique_lock l(_qMutex); - while (_active && (_max_buf_len >= 0) && (_frame_q.size() >= _max_buf_len)) { + while (_active && (_max_buf_len > 0) && (_frame_q.size() >= _max_buf_len)) { _qCond.wait(l); } l.unlock(); @@ -222,6 +239,7 @@ void FrameGrabber::process() break; } double timestamp = _source->getTimestamp(); + double ms_since_midnight = _source->getMsSinceMidnight(); /// Create output remap image in the loop. Mat remap_grey(_rh, _rw, CV_8UC1); @@ -233,7 +251,28 @@ void FrameGrabber::process() memset(win_min_hist.get(), 0, _thresh_win); /// Create grey ROI frame. 
- cv::cvtColor(frame_bgr, frame_grey, CV_BGR2GRAY); + int from_to[2] = { 0, 0 }; + switch (_thresh_rgb_transform) { + case RED: + from_to[0] = 2; from_to[1] = 0; + cv::mixChannels(&frame_bgr, 1, &frame_grey, 1, from_to, 1); + break; + + case GREEN: + from_to[0] = 1; from_to[1] = 0; + cv::mixChannels(&frame_bgr, 1, &frame_grey, 1, from_to, 1); + break; + + case BLUE: + from_to[0] = 0; from_to[1] = 0; + cv::mixChannels(&frame_bgr, 1, &frame_grey, 1, from_to, 1); + break; + + case GREY: + default: + cv::cvtColor(frame_bgr, frame_grey, cv::COLOR_BGR2GRAY); + break; + } _remapper->apply(frame_grey, remap_grey); /// Blur image before calculating region min/max values. @@ -251,12 +290,13 @@ void FrameGrabber::process() uint8_t* pgrey = remap_blur.ptr(i); for (int j = 0; j <= _thresh_rad; j++) { if (pmask[j] < 255) { continue; } - uint8_t g = pgrey[j]; + const uint8_t& g = pgrey[j]; if ((g > max) && (g < 255)) { max = g; } // ignore overexposed regions if (g < min) { min = g; } } - win_max_hist[win_it++] = max; - win_min_hist[win_it++] = min; + win_max_hist[win_it] = max; + win_min_hist[win_it] = min; + win_it++; } // compute window min/max @@ -264,16 +304,17 @@ void FrameGrabber::process() uint8_t* pthrmin = thresh_min.data; for (int j = 0; j < _rw; j++) { for (int i = 0; i < _rh; i++) { + // add row max = 0; min = 255; if ((i + _thresh_rad) < _rh) { const uint8_t* pmask = _remap_mask.ptr(i + _thresh_rad); - uint8_t* pgrey = remap_blur.ptr(i + _thresh_rad); + const uint8_t* pgrey = remap_blur.ptr(i + _thresh_rad); for (int s = -_thresh_rad; s <= _thresh_rad; s++) { - int js = j + s; + const int js = j + s; if ((js < 0) || (js >= _rw)) { continue; } if (pmask[js] < 255) { continue; } - uint8_t g = pgrey[js]; + const uint8_t& g = pgrey[js]; if ((g > max) && (g < 255)) { max = g; } // ignore overexposed regions if (g < min) { min = g; } } @@ -283,10 +324,10 @@ void FrameGrabber::process() const uint8_t* pmask = _remap_mask.ptr(i + _thresh_rad - _rh); uint8_t* pgrey = remap_blur.ptr(i + _thresh_rad - _rh); for (int s = -_thresh_rad; s <= _thresh_rad; s++) { - int js = j + s + 1; + const int js = j + s + 1; if ((js < 0) || (js >= _rw)) { continue; } if (pmask[js] < 255) { continue; } - uint8_t g = pgrey[js]; + const uint8_t& g = pgrey[js]; if ((g > max) && (g < 255)) { max = g; } // ignore overexposed regions if (g < min) { min = g; } } @@ -337,11 +378,12 @@ void FrameGrabber::process() _frame_q.push_back(frame_bgr); _remap_q.push_back(remap_grey); _ts_q.push_back(timestamp); + _ms_q.push_back(ms_since_midnight); _qCond.notify_all(); - - LOG_DBG("Processed frame added to input queue (l = %d).", _frame_q.size()); - + size_t q_size = _frame_q.size(); l.unlock(); + + LOG_DBG("Processed frame added to input queue (l = %zd).", q_size); } LOG_DBG("Stopping frame grabbing loop!"); diff --git a/src/Localiser.cpp b/src/Localiser.cpp index 36ccd57..7c9e116 100644 --- a/src/Localiser.cpp +++ b/src/Localiser.cpp @@ -9,14 +9,14 @@ #include "Logger.h" using cv::Mat; - +using namespace std; /// /// /// Localiser::Localiser(nlopt_algorithm alg, double bound, double tol, int max_evals, CameraModelPtr sphere_model, const Mat& sphere_map, - const Mat& roi_mask, std::shared_ptr p1s_lut) + const Mat& roi_mask, shared_ptr> p1s_lut) : _bound(bound), _sphere_model(sphere_model), _sphere_map(sphere_map), _roi_mask(roi_mask), _p1s_lut(p1s_lut) { init(alg, 3); @@ -96,7 +96,7 @@ double Localiser::testRotation(const double x[3]) for (int i = 0; i < _roi_h; i++) { const uint8_t* pmask = _roi_mask.ptr(i); const uint8_t* proi = 
_roi_frame.ptr(i); - const double* v = &(_p1s_lut[i * _roi_w * 3]); + const double* v = &(*_p1s_lut)[i * _roi_w * 3]; for (int j = 0; j < _roi_w; j++) { if (pmask[j] < 255) { continue; } cnt++; diff --git a/src/Logger.cpp b/src/Logger.cpp index 514a7e0..fc92d27 100644 --- a/src/Logger.cpp +++ b/src/Logger.cpp @@ -18,8 +18,8 @@ Logger::Logger() { // create log writer string fn = string("fictrac-") + execTime() + ".log"; - _log = unique_ptr(new Recorder(RecorderInterface::RecordType::FILE, fn)); - _cout = unique_ptr(new Recorder(RecorderInterface::RecordType::TERM)); + _log = make_unique(RecorderInterface::RecordType::FILE, fn); + _cout = make_unique(RecorderInterface::RecordType::TERM); if (_log->is_active() && _cout->is_active()) { cout << "Initialised logging to " << fn << endl; } else { @@ -61,22 +61,21 @@ void Logger::mprintf(LogLevel lvl, string func, string format, ...) // not re-entrant lock_guard l1(log._pMutex); + // expand args + va_list args; + va_start(args, format); + vsnprintf(buf, buf_size, format.c_str(), args); + va_end(args); + // print and log if ((int)lvl >= (int)verbosity()) { - - // expand args - va_list args; - va_start(args, format); - vsnprintf(buf, buf_size, format.c_str(), args); - va_end(args); - // async printing to console log._cout->addMsg(string(buf) + "\n"); + } - // don't log display text to file - if (lvl != PRT) { - // async logging to file (with additional info) - log._log->addMsg(to_string(elapsed_secs()) + " " + func + " [" + log.LogLevelStrings[lvl] + "] " + buf + "\n"); - } + // don't log display text to file (but log everything else) + if (lvl != PRT) { + // async logging to file (with additional info) + log._log->addMsg(to_string(elapsed_secs()) + " " + func + " [" + log.LogLevelStrings[lvl] + "] " + buf + "\n"); } } diff --git a/src/PGRSource.cpp b/src/PGRSource.cpp index 2cfbbec..40c0ec5 100644 --- a/src/PGRSource.cpp +++ b/src/PGRSource.cpp @@ -1,21 +1,29 @@ /// FicTrac http://rjdmoore.net/fictrac/ /// \file PGRSource.cpp -/// \brief PGR USB3 sources (Spinnaker SDK). +/// \brief PGR USB2/3 sources (FlyCapture/Spinnaker SDK). /// \author Richard Moore /// \copyright CC BY-NC-SA 3.0 -#ifdef PGR_USB3 +#if defined(PGR_USB2) || defined(PGR_USB3) #include "PGRSource.h" #include "Logger.h" +#include "timing.h" +#if defined(PGR_USB3) +#include "SpinGenApi/SpinnakerGenApi.h" using namespace Spinnaker; +#elif defined(PGR_USB2) +using namespace FlyCapture2; +#endif // PGR_USB2/3 + using cv::Mat; PGRSource::PGRSource(int index) { try { +#if defined(PGR_USB3) // Retrieve singleton reference to system object _system = System::GetInstance(); @@ -35,7 +43,7 @@ PGRSource::PGRSource(int index) return; } else { - LOG_DBG("Found %d PGR cameras.", numCameras); + LOG_DBG("Found %d PGR cameras. Connecting to camera %d..", numCameras, index); } // Select camera @@ -44,50 +52,201 @@ PGRSource::PGRSource(int index) // Initialize camera _cam->Init(); + // set acquisition mode - needed? 
+ { + // Retrieve GenICam nodemap + Spinnaker::GenApi::INodeMap& nodeMap = _cam->GetNodeMap(); + + // Retrieve enumeration node from nodemap + Spinnaker::GenApi::CEnumerationPtr ptrAcquisitionMode = nodeMap.GetNode("AcquisitionMode"); + if (!IsAvailable(ptrAcquisitionMode) || !IsWritable(ptrAcquisitionMode)) { + LOG_ERR("Unable to set acquisition mode to continuous (enum retrieval)!"); + return; + } + + // Retrieve entry node from enumeration node + Spinnaker::GenApi::CEnumEntryPtr ptrAcquisitionModeContinuous = ptrAcquisitionMode->GetEntryByName("Continuous"); + if (!IsAvailable(ptrAcquisitionModeContinuous) || !IsReadable(ptrAcquisitionModeContinuous)) { + LOG_ERR("Unable to set acquisition mode to continuous (entry retrieval)!"); + return; + } + + // Retrieve integer value from entry node + int64_t acquisitionModeContinuous = ptrAcquisitionModeContinuous->GetValue(); + + // Set integer value from entry node as new value of enumeration node + ptrAcquisitionMode->SetIntValue(acquisitionModeContinuous); + + LOG_DBG("Acquisition mode set to continuous."); + } + // Begin acquiring images _cam->BeginAcquisition(); // Get some params _width = _cam->Width(); _height = _cam->Height(); - _fps = _cam->AcquisitionFrameRate(); + _fps = getFPS(); +#elif defined(PGR_USB2) + LOG_DBG("Looking for camera at index %d...", index); + + BusManager busMgr; + PGRGuid guid; + Error error = busMgr.GetCameraFromIndex(index, &guid); + if (error != PGRERROR_OK) { + LOG_ERR("Error reading camera GUID!"); + return; + } + + _cam = std::make_shared(); + error = _cam->Connect(&guid); + if (error != PGRERROR_OK) { + LOG_ERR("Error connecting to camera!"); + return; + } + + CameraInfo camInfo; + error = _cam->GetCameraInfo(&camInfo); + if (error != PGRERROR_OK) { + LOG_ERR("Error retrieving camera information!"); + return; + } + else { + LOG_DBG("Connected to PGR camera (%s/%s max res: %s)", camInfo.modelName, camInfo.sensorInfo, camInfo.sensorResolution); + } + + error = _cam->StartCapture(); + if (error != PGRERROR_OK) { + LOG_ERR("Error starting video capture!"); + return; + } + + Image::SetDefaultColorProcessing(ColorProcessingAlgorithm::NEAREST_NEIGHBOR); + + // capture test image + Image testImg; + error = _cam->RetrieveBuffer(&testImg); + if (error != PGRERROR_OK) { + LOG_ERR("Error capturing image!"); + return; + } + _width = testImg.GetCols(); + _height = testImg.GetRows(); + _fps = getFPS(); +#endif // PGR_USB2/3 LOG("PGR camera initialised (%dx%d @ %.3f fps)!", _width, _height, _fps); _open = true; _live = true; } +#if defined(PGR_USB3) catch (Spinnaker::Exception& e) { LOG_ERR("Error opening capture device! Error was: %s", e.what()); } +#endif // PGR_USB3 catch (...) { LOG_ERR("Error opening capture device!"); } } +PGRSource::~PGRSource() +{ + if (_open) { + try { +#if defined(PGR_USB3) + _cam->EndAcquisition(); +#elif defined(PGR_USB2) + _cam->StopCapture(); +#endif // PGR_USB2/3 + } +#if defined(PGR_USB3) + catch (Spinnaker::Exception& e) { + LOG_ERR("Error ending acquisition! Error was: %s", e.what()); + } +#endif // PGR_USB3 + catch (...) 
{ + LOG_ERR("Error ending acquisition!"); + } + _open = false; + } + +#if defined(PGR_USB2) + _cam->Disconnect(); +#endif // PGR_USB2 + + _cam = NULL; + +#if defined(PGR_USB3) + // Clear camera list before releasing system + _camList.Clear(); + + // Release system + _system->ReleaseInstance(); +#endif // PGR_USB3 + +} + double PGRSource::getFPS() { - // do nothing - return _fps; + double fps = _fps; + if (_open) { +#if defined(PGR_USB3) + try { + fps = _cam->AcquisitionResultingFrameRate(); + } + catch (Spinnaker::Exception& e) { + LOG_ERR("Error retrieving camera frame rate! Error was: %s", e.what()); + } + catch (...) { + LOG_ERR("Error retrieving camera frame rate!"); + } +#endif // PGR_USB3 + } + return fps; } bool PGRSource::setFPS(double fps) { - _fps = fps; - return false; + bool ret = false; + if (_open && (fps > 0)) { +#if defined(PGR_USB3) + try { + _cam->AcquisitionFrameRateEnable.SetValue(true); + _cam->AcquisitionFrameRate.SetValue(fps); + } + catch (Spinnaker::Exception& e) { + LOG_ERR("Error setting frame rate! Error was: %s", e.what()); + } + catch (...) { + LOG_ERR("Error setting frame rate!"); + } +#endif // PGR_USB3 + _fps = getFPS(); + LOG("Device frame rate is now %.2f", _fps); + ret = true; + } + return ret; } bool PGRSource::grab(cv::Mat& frame) { if( !_open ) { return NULL; } +#if defined(PGR_USB3) ImagePtr pgr_image = NULL; try { // Retrieve next received image long int timeout = _fps > 0 ? std::max(static_cast(1000), static_cast(1000. / _fps)) : 1000; // set capture timeout to at least 1000 ms pgr_image = _cam->GetNextImage(timeout); - _timestamp = _cam->Timestamp(); + double ts = ts_ms(); // backup, in case the device timestamp is junk + _ms_since_midnight = ms_since_midnight(); + _timestamp = pgr_image->GetTimeStamp(); + LOG_DBG("Frame captured %dx%d%d @ %f (t_sys: %f ms, t_day: %f ms)", pgr_image->GetWidth(), pgr_image->GetHeight(), pgr_image->GetNumChannels(), _timestamp, ts, _ms_since_midnight); + if (_timestamp <= 0) { + _timestamp = ts; + } // Ensure image completion if (pgr_image->IsIncomplete()) { @@ -96,38 +255,65 @@ bool PGRSource::grab(cv::Mat& frame) pgr_image->Release(); return false; } + } + catch (Spinnaker::Exception& e) { + LOG_ERR("Error grabbing PGR frame! Error was: %s", e.what()); + pgr_image->Release(); + return false; + } + catch (...) { + LOG_ERR("Error grabbing PGR frame!"); + pgr_image->Release(); + return false; + } + try { // Convert image ImagePtr bgr_image = pgr_image->Convert(PixelFormat_BGR8, NEAREST_NEIGHBOR); - // We have to release our original image to clear space on the buffer - pgr_image->Release(); - Mat tmp(_height, _width, CV_8UC3, bgr_image->GetData(), bgr_image->GetStride()); tmp.copyTo(frame); + // We have to release our original image to clear space on the buffer + pgr_image->Release(); + return true; } catch (Spinnaker::Exception& e) { - LOG_ERR("Error grabbing PGR frame! Error was: %s", e.what()); + LOG_ERR("Error converting PGR frame! Error was: %s", e.what()); pgr_image->Release(); return false; } -} - -PGRSource::~PGRSource() -{ - if( _open ) { - _cam->EndAcquisition(); - _open = false; - } - _cam = NULL; - - // Clear camera list before releasing system - _camList.Clear(); + catch (...) 
{ + LOG_ERR("Error converting PGR frame!"); + pgr_image->Release(); + return false; + } +#elif defined(PGR_USB2) + Image frame_raw; + Error error = _cam->RetrieveBuffer(&frame_raw); + double ts = ts_ms(); // backup, in case the device timestamp is junk + //LOG_DBG("Frame captured %dx%d%d @ %f (%f)", pgr_image->GetWidth(), pgr_image->GetHeight(), pgr_image->GetNumChannels(), _timestamp, ts); + if (error != PGRERROR_OK) { + LOG_ERR("Error grabbing image frame!"); + return false; + } + auto timestamp = frame_raw.GetTimeStamp(); + _timestamp = timestamp.seconds * 1e3 + timestamp.microSeconds / (double)1e3; + if (_timestamp <= 0) { + _timestamp = ts; + } - // Release system - _system->ReleaseInstance(); + Image frame_bgr; + error = frame_raw.Convert(PIXEL_FORMAT_BGR, &frame_bgr); + if (error != PGRERROR_OK) { + LOG_ERR("Error converting image format!"); + return false; + } + Mat frame_cv(frame_bgr.GetRows(), frame_bgr.GetCols(), CV_8UC3, frame_bgr.GetData(), frame_bgr.GetStride()); + frame_cv.copyTo(frame); + return true; +#endif // PGR_USB2/3 } -#endif // PGR_USB3 +#endif // PGR_USB2/3 diff --git a/src/Recorder.cpp b/src/Recorder.cpp index 5425af6..c060fab 100644 --- a/src/Recorder.cpp +++ b/src/Recorder.cpp @@ -9,6 +9,7 @@ #include "TermRecorder.h" #include "FileRecorder.h" #include "SocketRecorder.h" +#include "SerialRecorder.h" #include "misc.h" // thread priority #include // cout/cerr @@ -21,13 +22,17 @@ Recorder::Recorder(RecorderInterface::RecordType type, string fn) /// Set record type. switch (type) { case RecorderInterface::RecordType::TERM: - _record = unique_ptr(new TermRecorder()); + _record = make_unique(); break; case RecorderInterface::RecordType::FILE: - _record = unique_ptr(new FileRecorder()); + _record = make_unique(); break; case RecorderInterface::RecordType::SOCK: - _record = unique_ptr(new SocketRecorder()); + _record = make_unique(); + break; + case RecorderInterface::RecordType::COM: + _record = make_unique(); + break; default: break; } @@ -35,7 +40,7 @@ Recorder::Recorder(RecorderInterface::RecordType type, string fn) /// Open record and start async recording. if (_record && _record->openRecord(fn)) { _active = true; - _thread = unique_ptr(new thread(&Recorder::processMsgQ, this)); + _thread = make_unique(&Recorder::processMsgQ, this); } else { cerr << "Error initialising recorder!" << endl; diff --git a/src/Remapper.cpp b/src/Remapper.cpp index 2439e89..5473036 100644 --- a/src/Remapper.cpp +++ b/src/Remapper.cpp @@ -88,98 +88,74 @@ void Remapper::applyF4(const float *src, float *dst, int srcStep, int dstStep) _apply(REMAP_TYPE_F4, src, dst, srcStep, dstStep); } -void Remapper::apply(const IplImage *src, IplImage *dst) -{ - /// - /// Sanity check. - /// - if (src->width!=_srcW || src->height!=_srcH) { - LOG_ERR("Error applying remapping! Unexpected source image size (%dx%d)!", src->width, src->height); - return; - } - if (dst->width!=_dstW || dst->height!=_dstH) { - LOG_ERR("Error applying remapping! Unexpected destination image size (%dx%d)!", dst->width, dst->height); - return; - } - - /// - /// Note: not enforcing nChannels consistency directly, but a test is - /// performed in _apply() but only to avoid segfaults. This is - /// to allow the likes of BayerRemap to overload the _apply() - /// method and perform a remapping using a single channel source - /// image and a three channel destination image. - /// - if (src->depth != dst->depth) { - LOG_ERR("Error applying remapping! 
Image depth mismatch (%d != %d).", src->depth, dst->depth); - return; - } - - switch (src->depth) { - case IPL_DEPTH_8U: - case IPL_DEPTH_8S: - switch (src->nChannels) { - case 1: _apply(REMAP_TYPE_C1, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 3: _apply(REMAP_TYPE_C3, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 4: _apply(REMAP_TYPE_C4, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - default: break; - } - break; - case IPL_DEPTH_16U: - case IPL_DEPTH_16S: - switch (src->nChannels) { - case 1: _apply(REMAP_TYPE_S1, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 3: _apply(REMAP_TYPE_S3, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 4: _apply(REMAP_TYPE_S4, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - default: break; - } - break; - case IPL_DEPTH_32F: - switch (src->nChannels) { - case 1: _apply(REMAP_TYPE_F1, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 3: _apply(REMAP_TYPE_F3, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - case 4: _apply(REMAP_TYPE_F4, - src->imageData, dst->imageData, - src->widthStep, dst->widthStep); break; - default: break; - } - break; - default: - LOG_ERR("Error applying remapping! Invalid data type"); - return; - } -} - void Remapper::apply(const cv::Mat& src, cv::Mat& dst) { - /// - /// Ensure destination is same type as source. - /// - if (!dst.data || dst.cols!=_dstW || dst.rows!=_dstH - || dst.type()!=src.type()) - { - dst.create(_dstH, _dstW, src.type()); - dst.setTo(cv::Scalar::all(0)); - } - - IplImage iplSrc = src; - IplImage iplDst = dst; - apply(&iplSrc, &iplDst); + /// + /// Sanity check. + /// + if (src.cols != _srcW || src.rows != _srcH) { + LOG_ERR("Error applying remapping! Unexpected source image size (%dx%d)!", src.cols, src.rows); + return; + } + + /// + /// Ensure destination is same type as source. + /// + if (dst.cols != src.cols || dst.rows != src.rows || dst.type() != src.type()) { + // if logic unnecessary for create(), but we don't want to bother clearing mem unless we allocate + dst.create(_dstH, _dstW, src.type()); + dst.setTo(cv::Scalar::all(0)); + } + + switch (src.depth()) { + case CV_8U: + case CV_8S: + switch (src.channels()) { + case 1: _apply(REMAP_TYPE_C1, + src.data, dst.data, + src.step, dst.step); break; + case 3: _apply(REMAP_TYPE_C3, + src.data, dst.data, + src.step, dst.step); break; + case 4: _apply(REMAP_TYPE_C4, + src.data, dst.data, + src.step, dst.step); break; + default: break; + } + break; + case CV_16U: + case CV_16S: + switch (src.channels()) { + case 1: _apply(REMAP_TYPE_S1, + src.data, dst.data, + src.step, dst.step); break; + case 3: _apply(REMAP_TYPE_S3, + src.data, dst.data, + src.step, dst.step); break; + case 4: _apply(REMAP_TYPE_S4, + src.data, dst.data, + src.step, dst.step); break; + default: break; + } + break; + case CV_32F: + switch (src.channels()) { + case 1: _apply(REMAP_TYPE_F1, + src.data, dst.data, + src.step, dst.step); break; + case 3: _apply(REMAP_TYPE_F3, + src.data, dst.data, + src.step, dst.step); break; + case 4: _apply(REMAP_TYPE_F4, + src.data, dst.data, + src.step, dst.step); break; + default: break; + } + break; + default: + LOG_ERR("Error applying remapping! 
Invalid data type"); + return; + } } diff --git a/src/SerialRecorder.cpp b/src/SerialRecorder.cpp new file mode 100644 index 0000000..4c41fe7 --- /dev/null +++ b/src/SerialRecorder.cpp @@ -0,0 +1,101 @@ +/// FicTrac http://rjdmoore.net/fictrac/ +/// \file SerialRecorder.cpp +/// \brief Implementation of serial recorder. +/// \author Richard Moore +/// \copyright CC BY-NC-SA 3.0 + +#include "SerialRecorder.h" + +#include "Logger.h" + +#include +#include + +#include + +using namespace std; +using namespace boost; + +/// +/// +/// +SerialRecorder::SerialRecorder() +{ + _type = COM; +} + +/// +/// +/// +SerialRecorder::~SerialRecorder() +{ + closeRecord(); +} + +/// +/// +/// +bool SerialRecorder::openRecord(std::string port_baud) +{ + // extract port no and baud + size_t pos = port_baud.find_first_of('@'); + if (pos == string::npos) { + LOG_ERR("Error! Malformed port:baud string."); + return false; + } + _port_name = port_baud.substr(0, pos); + int baud = stoi(port_baud.substr(pos + 1)); + + LOG("Opening serial port %s with baud rate %d", _port_name.c_str(), baud); + + // open serial port + try { + asio::io_service io; + _port = make_shared(io); + _port->open(_port_name); + //_port->set_option(asio::serial_port_base::baud_rate(baud)); + _open = _port->is_open(); + if (!_open) { throw; } + } + catch (const boost::system::system_error &e) { + LOG_ERR("Error! Could not open serial port %s @ baud rate %d. Error was %s", _port_name.c_str(), baud, boost::diagnostic_information(e).c_str()); + _open = false; + } + catch (const boost::exception &e) { + LOG_ERR("Error! Could not open serial port %s @ baud rate %d. Error was %s", _port_name.c_str(), baud, boost::diagnostic_information(e).c_str()); + _open = false; + } + catch (...) { + LOG_ERR("Error! Could not open serial port %s @ baud rate %d.", _port_name.c_str(), baud); + _open = false; + } + return _open; +} + +/// +/// +/// +bool SerialRecorder::writeRecord(string s) +{ + if (_open) { + try { + _port->write_some(asio::buffer(s)); + } + catch (const boost::exception &e) { + LOG_ERR("Error writing to serial port (%s)! Error was %s", _port_name.c_str(), boost::diagnostic_information(e).c_str()); + return false; + } + } + return _open; +} + +/// +/// +/// +void SerialRecorder::closeRecord() +{ + LOG("Closing serial port %s", _port_name.c_str()); + + _open = false; + _port->close(); +} diff --git a/src/Serial_win.src b/src/Serial_win.src new file mode 100644 index 0000000..f81972f --- /dev/null +++ b/src/Serial_win.src @@ -0,0 +1,109 @@ +/** Serial.cpp + * + * A very simple serial port control class that does NOT require MFC/AFX. 
+ * + * @author Hans de Ruiter + * + * @version 0.1 -- 28 October 2008 + */ + +#include +using namespace std; + +#include "Serial.h" + +Serial::Serial(tstring &commPortName, int bitRate) +{ + commHandle = CreateFile(commPortName.c_str(), GENERIC_READ|GENERIC_WRITE, 0,NULL, OPEN_EXISTING, + 0, NULL); + + if(commHandle == INVALID_HANDLE_VALUE) + { + throw("ERROR: Could not open com port"); + } + else + { + // set timeouts + COMMTIMEOUTS cto = { MAXDWORD, 0, 0, 0, 0}; + DCB dcb; + if(!SetCommTimeouts(commHandle,&cto)) + { + Serial::~Serial(); + throw("ERROR: Could not set com port time-outs"); + } + + // set DCB + memset(&dcb,0,sizeof(dcb)); + dcb.DCBlength = sizeof(dcb); + dcb.BaudRate = bitRate; + dcb.fBinary = 1; + dcb.fDtrControl = DTR_CONTROL_ENABLE; + dcb.fRtsControl = RTS_CONTROL_ENABLE; + + dcb.Parity = NOPARITY; + dcb.StopBits = ONESTOPBIT; + dcb.ByteSize = 8; + + if(!SetCommState(commHandle,&dcb)) + { + Serial::~Serial(); + throw("ERROR: Could not set com port parameters"); + } + } +} + +Serial::~Serial() +{ + CloseHandle(commHandle); +} + +int Serial::write(const char *buffer) +{ + DWORD numWritten; + WriteFile(commHandle, buffer, strlen(buffer), &numWritten, NULL); + + return numWritten; +} + +int Serial::write(const char *buffer, int buffLen) +{ + DWORD numWritten; + WriteFile(commHandle, buffer, buffLen, &numWritten, NULL); + + return numWritten; +} + +int Serial::read(char *buffer, int buffLen, bool nullTerminate) +{ + DWORD numRead; + if(nullTerminate) + { + --buffLen; + } + + BOOL ret = ReadFile(commHandle, buffer, buffLen, &numRead, NULL); + + if(!ret) + { + return 0; + } + + if(nullTerminate) + { + buffer[numRead] = '\0'; + } + + return numRead; +} + +#define FLUSH_BUFFSIZE 10 + +void Serial::flush() +{ + char buffer[FLUSH_BUFFSIZE]; + int numBytes = read(buffer, FLUSH_BUFFSIZE, false); + while(numBytes != 0) + { + numBytes = read(buffer, FLUSH_BUFFSIZE, false); + } +} diff --git a/src/SocketRecorder.cpp b/src/SocketRecorder.cpp index c993054..391d237 100644 --- a/src/SocketRecorder.cpp +++ b/src/SocketRecorder.cpp @@ -6,12 +6,7 @@ #include "SocketRecorder.h" -#include -#include - -#if (defined (WIN32)) #include "zhelpers.hpp" -#endif #include #include diff --git a/src/SocketRecorder_linux.src b/src/SocketRecorder_linux.src index 6b0d972..26ef9ec 100644 --- a/src/SocketRecorder_linux.src +++ b/src/SocketRecorder_linux.src @@ -6,6 +6,8 @@ #include "SocketRecorder_linux.h" +#include "Logger.h" + #include #include #include @@ -44,7 +46,7 @@ bool SocketRecorder::openRecord(std::string port) // Create socket _listenSocket = socket(AF_INET, SOCK_STREAM, 0); if (_listenSocket < 0) { - std::cerr << "Error! Could not create valid socket on port " << port << "!" << std::endl; + LOG_ERR("Error! Could not create valid socket on port %s.", port.c_str()); return false; } @@ -55,28 +57,29 @@ bool SocketRecorder::openRecord(std::string port) portno = atoi(port.c_str()); } catch (...) { - std::cerr << "Error! Invalid socket port number (" << port << ")!" << std::endl; + LOG_ERR("Error! Invalid socket port number (%s).", port.c_str()); return false; } serv_addr.sin_family = AF_INET; serv_addr.sin_addr.s_addr = INADDR_ANY; serv_addr.sin_port = htons(portno); if (bind(_listenSocket, (struct sockaddr *) &serv_addr, sizeof(serv_addr)) < 0) { - std::cerr << "Error! Failed to bind socket on port " << port << "!" << std::endl; + LOG_ERR("Error! Failed to bind socket on port %s.", port.c_str()); return false; } // Listen on our socket if (listen(_listenSocket,5) < 0) { - std::cerr << "Error! 
Failed to listen to socket on port " << port << "!" << std::endl; + LOG_ERR("Error! Failed to listen to socket on port %s.", port.c_str()); return false; } // Wait for client connection... socklen_t clilen = sizeof(cli_addr); + PRINT("\nWaiting for client connection to socket: %s ...\n", port.c_str()); _clientSocket = accept(_listenSocket, (struct sockaddr *) &cli_addr, &clilen); // blocking if (_clientSocket < 0) { - std::cerr << "Error! Failed to accept socket connection on port " << port << "!" << std::endl; + LOG_ERR("Error! Failed to accept socket connection on port %s.", port.c_str()); return false; } @@ -91,7 +94,7 @@ bool SocketRecorder::writeRecord(std::string s) if (_open) { int n = write(_clientSocket,s.c_str(),s.size()); if (n < 0) { - std::cerr << "Error! Send failed!" << std::endl; + LOG_ERR("Error! Send failed."); _open = false; // should this be a terminal error? } } diff --git a/src/SocketRecorder_winsocket.src b/src/SocketRecorder_win.src similarity index 69% rename from src/SocketRecorder_winsocket.src rename to src/SocketRecorder_win.src index 3cb2e07..243d1e9 100644 --- a/src/SocketRecorder_winsocket.src +++ b/src/SocketRecorder_win.src @@ -1,14 +1,15 @@ /// FicTrac http://rjdmoore.net/fictrac/ -/// \file SocketRecorder_winsocket.cpp +/// \file SocketRecorder_win.cpp /// \brief Windows implementation of socket recorder. /// \author Richard Moore /// \copyright CC BY-NC-SA 3.0 -#include "SocketRecorder_winsocket.h" +#include "SocketRecorder_win.h" + +#include "Logger.h" #include #include -#include // cout/cerr /// /// @@ -35,7 +36,7 @@ bool SocketRecorder::openRecord(std::string port) // Initialize Winsock int iResult = WSAStartup(MAKEWORD(2, 2), &_wsaData); if (iResult != 0) { - std::cerr << "Error! Failed to initialise WinSock library (err = " << iResult << ")!" << std::endl; + LOG_ERR("Error! Failed to initialise WinSock library (err = %d)", iResult); return false; } @@ -50,7 +51,7 @@ bool SocketRecorder::openRecord(std::string port) // Resolve the local address and port to be used by the server iResult = getaddrinfo(NULL, port.c_str(), &hints, &result); if (iResult != 0) { - std::cerr << "Error! Failed to resolve local address (err = " << iResult << ")!" << std::endl; + LOG_ERR("Error! Failed to resolve local address (err = %d).", iResult); return false; } @@ -58,7 +59,7 @@ bool SocketRecorder::openRecord(std::string port) _listenSocket = socket(result->ai_family, result->ai_socktype, result->ai_protocol); if (_listenSocket == INVALID_SOCKET) { //FIXME: include IP address in error msg. - std::cerr << "Error! Could not create valid socket on port " << port << " (err = " << WSAGetLastError() << ")!" << std::endl; + LOG_ERR("Error! Could not create valid socket on port %s (err = %d).", port.c_str(), WSAGetLastError()); freeaddrinfo(result); return false; } @@ -66,7 +67,7 @@ bool SocketRecorder::openRecord(std::string port) // Setup the TCP listening socket iResult = bind(_listenSocket, result->ai_addr, (int)result->ai_addrlen); if (iResult == SOCKET_ERROR) { - std::cerr << "Error! Failed to bind socket on port " << port << " (err = " << WSAGetLastError() << ")!" << std::endl; + LOG_ERR("Error! Failed to bind socket on port %s (err = %d).", port.c_str(), WSAGetLastError()); freeaddrinfo(result); return false; } @@ -76,17 +77,16 @@ bool SocketRecorder::openRecord(std::string port) // Listen on our socket. if (listen(_listenSocket, SOMAXCONN) == SOCKET_ERROR) { - std::cerr << "Error! Failed to listen to socket on port " << port << " (err = " << WSAGetLastError() << ")!" 
<< std::endl; + LOG_ERR("Error! Failed to listen to socket on port %s (err = %d).", port.c_str(), WSAGetLastError()); return false; } // Wait for client connection... //FIXME: include IP:port info in this message - std::cout << "\nWaiting for client connection to socket: " << port << " ..." << std::endl; - std::fflush(stdout); + PRINT("\nWaiting for client connection to socket: %s ...\n", port.c_str()); _clientSocket = accept(_listenSocket, NULL, NULL); // blocking if (_clientSocket == INVALID_SOCKET) { - std::cerr << "Error! Failed to accept socket connection (err = " << WSAGetLastError() << ")!" << std::endl; + LOG_ERR("Error! Failed to accept socket connection (err = %d).", WSAGetLastError()); return false; } @@ -101,7 +101,7 @@ bool SocketRecorder::writeRecord(std::string s) if (_open) { int iSendResult = send(_clientSocket, s.c_str(), s.size(), 0); if (iSendResult == SOCKET_ERROR) { - std::cerr << "Error! Send failed (err = " << WSAGetLastError() << ")!" << std::endl; + LOG_ERR("Error! Send failed (err = %d).", WSAGetLastError()); _open = false; // should this be a terminal error? } } diff --git a/src/SquareRT.cpp b/src/SquareRT.cpp index cf8fe1f..fe7830b 100644 --- a/src/SquareRT.cpp +++ b/src/SquareRT.cpp @@ -34,8 +34,8 @@ SquareRT::SquareRT(const vector& cnrs, const Mat& ref_cnrs) : _corners(cnrs), _ref_corners(ref_cnrs) { // tx ty tz r_az r_el r_mag - double lb[6] = {-CM_PI, -CM_PI, -CM_PI, -1e3, -1e3, 0}; - double ub[6] = {CM_PI, CM_PI, CM_PI, 1e3, 1e3, 1e3}; + double lb[6] = {-CM_PI, -CM_PI, -CM_PI, -1e4, -1e4, 0}; + double ub[6] = {CM_PI, CM_PI, CM_PI, 1e4, 1e4, 1e4}; init(NLOPT_GN_CRS2_LM, 6); setLowerBounds(lb); setUpperBounds(ub); diff --git a/src/Trackball.cpp b/src/Trackball.cpp index 47b8fa7..5e003ae 100644 --- a/src/Trackball.cpp +++ b/src/Trackball.cpp @@ -16,9 +16,9 @@ #include "BasicRemapper.h" #include "misc.h" #include "CVSource.h" -#ifdef PGR_USB3 +#if defined(PGR_USB2) || defined(PGR_USB3) #include "PGRSource.h" -#endif // PGR_USB3 +#endif // PGR_USB2/3 /// OpenCV individual includes required by gcc? 
#include @@ -30,27 +30,16 @@ #include #include -using std::string; -using std::unique_ptr; -using std::shared_ptr; -using std::unique_lock; -using std::lock_guard; -using std::mutex; -using std::vector; -using std::deque; -using cv::Mat; -using cv::Scalar; -using cv::Rect; -using cv::Point2d; -using cv::Point2i; - -const int DRAW_SPHERE_HIST_LENGTH = 250; +using namespace cv; +using namespace std; + +const int DRAW_SPHERE_HIST_LENGTH = 1024; const int DRAW_CELL_DIM = 160; const int DRAW_FICTIVE_PATH_LENGTH = 1000; const int Q_FACTOR_DEFAULT = 6; const double OPT_TOL_DEFAULT = 1e-3; -const double OPT_BOUND_DEFAULT = 0.25; +const double OPT_BOUND_DEFAULT = 0.35; const int OPT_MAX_EVAL_DEFAULT = 50; const bool OPT_GLOBAL_SEARCH_DEFAULT = false; const int OPT_MAX_BAD_FRAMES_DEFAULT = -1; @@ -60,10 +49,21 @@ const double THRESH_WIN_PC_DEFAULT = 0.25; const uint8_t SPHERE_MAP_FIRST_HIT_BONUS = 64; +const int COM_BAUD_DEFAULT = 115200; + const bool DO_DISPLAY_DEFAULT = true; const bool SAVE_RAW_DEFAULT = false; const bool SAVE_DEBUG_DEFAULT = false; +/// OpenCV codecs for video writing +const vector<vector<string>> CODECS = { + {"h264", "H264", "avi"}, + {"xvid", "XVID", "avi"}, + {"mpg4", "MP4V", "mp4"}, + {"mjpg", "MJPG", "avi"}, + {"raw", "", "avi"} +}; + /// /// /// @@ -87,8 +87,11 @@ bool intersectSphere(const double camVec[3], double sphereVec[3], const double r /// /// Trackball::Trackball(string cfg_fn) - : _init(false), _reset(true), _clean_map(true), _active(true) + : _init(false), _reset(true), _clean_map(true), _active(true), _kill(false), _do_reset(false) { + /// Save execTime for output file naming. + string exec_time = execTime(); + /// Load and parse config file. if (_cfg.read(cfg_fn) <= 0) { LOG_ERR("Error parsing config file (%s)!", cfg_fn.c_str()); @@ -99,27 +102,28 @@ Trackball::Trackball(string cfg_fn) /// Open frame source and set fps. string src_fn = _cfg("src_fn"); shared_ptr source; -#ifdef PGR_USB3 +#if defined(PGR_USB2) || defined(PGR_USB3) try { + if (src_fn.size() > 2) { throw std::exception(); } // first try reading input as camera id int id = std::stoi(src_fn); - source = std::make_shared<PGRSource>(id); + source = make_shared<PGRSource>(id); } catch (...) { // then try loading as video file - source = std::make_shared<CVSource>(src_fn); + source = make_shared<CVSource>(src_fn); } -#else // PGR_USB3 - source = std::make_shared<CVSource>(src_fn); -#endif // PGR_USB3 +#else // !PGR_USB2/3 + source = make_shared<CVSource>(src_fn); +#endif // PGR_USB2/3 if (!source->isOpen()) { LOG_ERR("Error! Could not open input frame source (%s)!", src_fn.c_str()); _active = false; return; } double src_fps = -1; - if (_cfg.getDbl("src_fps", src_fps) && (src_fps >= 0)) { - LOG("Setting source fps = %.2f..", src_fps); + if (_cfg.getDbl("src_fps", src_fps) && (src_fps > 0)) { + LOG("Attempting to set source fps to %.2f", src_fps); source->setFPS(src_fps); } else { @@ -138,25 +142,33 @@ Trackball::Trackball(string cfg_fn) /// Source camera model. double vfov = -1; - if (!_cfg.getDbl("vfov", vfov) || vfov <= 0) { + if (!_cfg.getDbl("vfov", vfov) || (vfov <= 0)) { LOG_ERR("Error!
Camera vertical FoV parameter specified in the config file (vfov) is invalid!"); _active = false; return; } - //FIXME: support other camera models - _src_model = CameraModel::createRectilinear(source->getWidth(), source->getHeight(), vfov * CM_D2R); + bool fisheye = false; + if (_cfg.getBool("fisheye", fisheye) && fisheye) { + _src_model = CameraModel::createFisheye(source->getWidth(), source->getHeight(), vfov * CM_D2R / (double)source->getHeight(), 360 * CM_D2R); + } + else { + // default to rectilinear + _src_model = CameraModel::createRectilinear(source->getWidth(), source->getHeight(), vfov * CM_D2R); + } /// Dimensions - quality defaults to 6 (remap_dim 60x60, sphere_dim 180x90). int q_factor = Q_FACTOR_DEFAULT; - if (!_cfg.getInt("q_factor", q_factor)) { + if (!_cfg.getInt("q_factor", q_factor) || (q_factor <= 0)) { LOG_WRN("Warning! Resolution parameter specified in the config file (q_factor) is invalid! Using default value (%d).", q_factor); _cfg.add("q_factor", q_factor); } - _roi_w = _roi_h = 10 * q_factor; + _roi_w = _roi_h = std::min(10 * q_factor,source->getWidth()); _map_h = static_cast(1.5 * _roi_h); _map_w = 2 * _map_h; /// Load sphere config and mask. + bool reconfig = false; + //_cfg.getBool("reconfig", reconfig); // ignore saved roi_c, roi_r, c2a_r, and c2a_t values and recompute from pixel coords - dangerous!! Mat src_mask(source->getHeight(), source->getWidth(), CV_8UC1); src_mask.setTo(Scalar::all(0)); { @@ -164,9 +176,9 @@ Trackball::Trackball(string cfg_fn) _sphere_rad = -1; vector circ_pxs; vector sphere_c; - if (_cfg.getVecDbl("roi_c", sphere_c) && _cfg.getDbl("roi_r", _sphere_rad)) { + if (!reconfig && _cfg.getVecDbl("roi_c", sphere_c) && _cfg.getDbl("roi_r", _sphere_rad)) { _sphere_c.copy(sphere_c.data()); - LOG("Found sphere ROI centred at [%f %f %f], with radius %f rad.", _sphere_c[0], _sphere_c[1], _sphere_c[2], _sphere_rad); + LOG_DBG("Found sphere ROI centred at [%f %f %f], with radius %f rad.", _sphere_c[0], _sphere_c[1], _sphere_c[2], _sphere_rad); } else if (_cfg.getVecInt("roi_circ", circ_pxs)) { vector circ_pts; @@ -176,7 +188,7 @@ Trackball::Trackball(string cfg_fn) // fit circular fov if ((circ_pts.size() >= 3) && circleFit_camModel(circ_pts, _src_model, _sphere_c, _sphere_rad)) { - LOG("Computed sphere ROI centred at [%f %f %f], with radius %f rad from %d roi_circ points.", + LOG_WRN("Warning! Re-computed sphere ROI centred at [%f %f %f], with radius %f rad from %d roi_circ points.", _sphere_c[0], _sphere_c[1], _sphere_c[2], _sphere_rad, circ_pts.size()); } } @@ -186,7 +198,7 @@ Trackball::Trackball(string cfg_fn) _r_d_ratio = sin(_sphere_rad); /// Allow sphere region in mask. - shared_ptr> int_circ = projCircleInt(_src_model, _sphere_c, _sphere_rad); + auto int_circ = projCircleInt(_src_model, _sphere_c, _sphere_rad * 0.975f); // crop a bit of the circle to avoid circumference thresholding issues cv::fillConvexPoly(src_mask, *int_circ, CV_RGB(255, 255, 255)); /// Mask out ignore regions. @@ -205,7 +217,7 @@ Trackball::Trackball(string cfg_fn) cv::fillPoly(src_mask, ignr_polys_pts, CV_RGB(0, 0, 0)); } else { - LOG_WRN("Warning! No valid mask ignore regions specified in config file (roi_ignr)!"); + LOG_DBG("No valid mask ignore regions specified in config file (roi_ignr)!"); } /// Sphere config read successfully. 
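Side note on the q_factor handling above: a minimal standalone sketch (not part of the patch) of how the quality factor maps to the remapped ROI and sphere-map resolutions, mirroring the expressions in the constructor; the 640 px source width is only an illustrative assumption.

    #include <algorithm>
    #include <cstdio>

    int main()
    {
        int q_factor = 6;                          // config default (Q_FACTOR_DEFAULT)
        int src_w = 640;                           // assumed source width, for illustration only
        int roi = std::min(10 * q_factor, src_w);  // 60x60 remapped ROI at the default quality
        int map_h = static_cast<int>(1.5 * roi);   // 90
        int map_w = 2 * map_h;                     // 180 -> the "sphere_dim 180x90" noted in the comment above
        std::printf("roi %dx%d, map %dx%d\n", roi, roi, map_w, map_h);
        return 0;
    }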
@@ -218,32 +230,43 @@ } } - /* - * The model sphere has it's own coordinate system, because we arbitrarily set - * the view vector corresponding to the centre of the projection of the tracking - * ball to be the principal axis of the virtual camera (cam_model). - * - * All incoming and outgoing vectors and matrices must thus be transposed to/from - * this coordinate system: lab <= (_cam_to_lab_R) => cam <= (_roi_to_cam_R) => roi. - */ - /// Create coordinate frame transformation matrices. - CmPoint64f roi_to_cam_r, cam_to_lab_r; + CmPoint64f roi_to_cam_r; { // ROI to cam transformation from sphere centre ray. CmPoint64f z(0, 0, 1); // forward in camera coords roi_to_cam_r = _sphere_c.getRotationTo(z); // find axis-angle to rotate sphere centre to camera centre. - _roi_to_cam_R = CmPoint64f::omegaToMatrix(roi_to_cam_r); + /*_roi_to_cam_R = CmPoint64f::omegaToMatrix(roi_to_cam_r);*/ LOG_DBG("roi_to_cam_r: %.4f %.4f %.4f", roi_to_cam_r[0], roi_to_cam_r[1], roi_to_cam_r[2]); // Cam to lab transformation from configuration. vector<double> c2a_r; - if (_cfg.getVecDbl("c2a_r", c2a_r) && (c2a_r.size() == 3)) { - cam_to_lab_r = CmPoint64f(c2a_r[0], c2a_r[1], c2a_r[2]); + string c2a_src; + vector<int> c2a_pts; + if (!reconfig && _cfg.getVecDbl("c2a_r", c2a_r) && (c2a_r.size() == 3)) { + CmPoint64f cam_to_lab_r = CmPoint64f(c2a_r[0], c2a_r[1], c2a_r[2]); _cam_to_lab_R = CmPoint64f::omegaToMatrix(cam_to_lab_r); + LOG_DBG("Found C2A rotational transform: [%f %f %f].", cam_to_lab_r[0], cam_to_lab_r[1], cam_to_lab_r[2]); } - else { + else if (_cfg.getStr("c2a_src", c2a_src) && _cfg.getVecInt(c2a_src, c2a_pts)) { + // c2a source and pixel coords present - recompute transform + vector<cv::Point2d> cnrs; + for (unsigned int i = 1; i < c2a_pts.size(); i += 2) { + cnrs.push_back(cv::Point2d(c2a_pts[i - 1], c2a_pts[i])); + } + Mat t; + if (computeRtFromSquare(_src_model, c2a_src.substr(c2a_src.size() - 2), cnrs, _cam_to_lab_R, t)) { + _cam_to_lab_R = _cam_to_lab_R.t(); // transpose to convert to camera-lab transform + CmPoint64f cam_to_lab_r = CmPoint64f::matrixToOmega(_cam_to_lab_R); + LOG_WRN("Warning! Re-computed C2A rotational transform [%f %f %f] using %s.", cam_to_lab_r[0], cam_to_lab_r[1], cam_to_lab_r[2], c2a_src.c_str()); + } + else { + LOG_ERR("Error! Camera-to-lab coordinate transformation specified in config file (c2a_r) is invalid!"); + _active = false; + return; + } + } else { LOG_ERR("Error! Camera-to-lab coordinate transformation specified in config file (c2a_r) is invalid!"); _active = false; return; @@ -260,10 +283,9 @@ Trackball::Trackball(string cfg_fn) _roi_mask.create(_roi_h, _roi_w, CV_8UC1); _roi_mask.setTo(cv::Scalar::all(255)); remapper->apply(src_mask, _roi_mask); - erode(_roi_mask, _roi_mask, Mat(), cv::Point(-1, -1), 1, cv::BORDER_CONSTANT, 0); // remove edge effects /// Surface mapping. - _sphere_model = CameraModel::createEquiArea(_map_w, _map_h); + _sphere_model = CameraModel::createEquiArea(_map_w, _map_h, CM_PI_2, -CM_PI, CM_PI, -2 * CM_PI); /// Buffers. _sphere_map.create(_map_h, _map_w, CV_8UC1); @@ -290,8 +312,7 @@ Trackball::Trackball(string cfg_fn) } /// Pre-calc view rays.
- _p1s_lut = std::shared_ptr(new double[_roi_w * _roi_h * 3]); - memset(_p1s_lut.get(), 0, _roi_w * _roi_h * 3 * sizeof(double)); + _p1s_lut = make_shared<vector<double>>(_roi_w * _roi_h * 3, 0); for (int i = 0; i < _roi_h; i++) { uint8_t* pmask = _roi_mask.ptr(i); for (int j = 0; j < _roi_w; j++) { @@ -301,31 +322,31 @@ _roi_model->pixelIndexToVector(j, i, l); vec3normalise(l); - double* s = &_p1s_lut[(i * _roi_w + j) * 3]; + double* s = &(*_p1s_lut)[(i * _roi_w + j) * 3]; if (!intersectSphere(l, s, _r_d_ratio)) { pmask[j] = 128; } } } /// Read config params. double tol = OPT_TOL_DEFAULT; - if (!_cfg.getDbl("opt_tol", tol)) { + if (!_cfg.getDbl("opt_tol", tol) || (tol <= 0)) { LOG_WRN("Warning! Using default value for opt_tol (%f).", tol); _cfg.add("opt_tol", tol); } double bound = OPT_BOUND_DEFAULT; - if (!_cfg.getDbl("opt_bound", bound)) { + if (!_cfg.getDbl("opt_bound", bound) || (bound <= 0)) { LOG_WRN("Warning! Using default value for opt_bound (%f).", bound); _cfg.add("opt_bound", bound); } int max_evals = OPT_MAX_EVAL_DEFAULT; - if (!_cfg.getInt("opt_max_evals", max_evals)) { + if (!_cfg.getInt("opt_max_evals", max_evals) || (max_evals <= 0)) { LOG_WRN("Warning! Using default value for opt_max_eval (%d).", max_evals); _cfg.add("opt_max_evals", max_evals); } - _global_search = OPT_GLOBAL_SEARCH_DEFAULT; - if (!_cfg.getBool("opt_do_global", _global_search)) { - LOG_WRN("Warning! Using default value for opt_do_global (%d).", _global_search); - _cfg.add("opt_do_global", _global_search); + _do_global_search = OPT_GLOBAL_SEARCH_DEFAULT; + if (!_cfg.getBool("opt_do_global", _do_global_search)) { + LOG_WRN("Warning! Using default value for opt_do_global (%d).", _do_global_search); + _cfg.add("opt_do_global", _do_global_search ? "y" : "n"); } _max_bad_frames = OPT_MAX_BAD_FRAMES_DEFAULT; if (!_cfg.getInt("max_bad_frames", _max_bad_frames)) { @@ -333,7 +354,7 @@ _cfg.add("max_bad_frames", _max_bad_frames); } _error_thresh = -1; - if (!_cfg.getDbl("opt_max_err", _error_thresh) || _error_thresh < 0) { + if (!_cfg.getDbl("opt_max_err", _error_thresh) || (_error_thresh < 0)) { LOG_WRN("Warning! No optimisation error threshold specified in config file (opt_max_err) - poor matches will not be dropped!"); _cfg.add("opt_max_err", _error_thresh); } @@ -349,28 +370,54 @@ } /// Init optimisers. - _localOpt = unique_ptr<Localiser>(new Localiser( + _localOpt = make_unique<Localiser>( NLOPT_LN_BOBYQA, bound, tol, max_evals, _sphere_model, _sphere_map, - _roi_mask, _p1s_lut)); + _roi_mask, _p1s_lut); - _globalOpt = unique_ptr<Localiser>(new Localiser( + _globalOpt = make_unique<Localiser>( NLOPT_GN_CRS2_LM, CM_PI, tol, 1e5, _sphere_model, _sphere_map, - _roi_mask, _p1s_lut)); - - /// Frame source. - _frameGrabber = unique_ptr<FrameGrabber>(new FrameGrabber( - source, - remapper, - _roi_mask, - thresh_ratio, - thresh_win_pc - )); + _roi_mask, _p1s_lut); /// Output. - string data_fn = _base_fn + "-" + execTime() + ".dat"; - _log = unique_ptr<Recorder>(new Recorder(RecorderInterface::RecordType::FILE, data_fn)); + string data_fn = _base_fn + "-" + exec_time + ".dat"; + _data_log = make_unique<Recorder>(RecorderInterface::RecordType::FILE, data_fn); + if (!_data_log->is_active()) { + LOG_ERR("Error!
Unable to open output data log file (%s).", data_fn.c_str()); + _active = false; + return; + } + + int sock_port = 0; + _do_sock_output = false; + if (_cfg.getInt("sock_port", sock_port) && (sock_port > 0)) { + _data_sock = make_unique<Recorder>(RecorderInterface::RecordType::SOCK, std::to_string(sock_port)); + if (!_data_sock->is_active()) { + LOG_ERR("Error! Unable to open output data socket (%d).", sock_port); + _active = false; + return; + } + _do_sock_output = true; + } + + string com_port = _cfg("com_port"); + _do_com_output = false; + if (com_port.length() > 0) { + int com_baud = COM_BAUD_DEFAULT; + if (!_cfg.getInt("com_baud", com_baud)) { + LOG_WRN("Warning! Using default value for com_baud (%d).", com_baud); + _cfg.add("com_baud", com_baud); + } + + _data_com = make_unique<Recorder>(RecorderInterface::RecordType::COM, com_port + "@" + std::to_string(com_baud)); + if (!_data_com->is_active()) { + LOG_ERR("Error! Unable to open output data com port (%s@%d).", com_port.c_str(), com_baud); + _active = false; + return; + } + _do_com_output = true; + } /// Open socket recorder if enabled _do_socket = false; @@ -389,63 +436,120 @@ _do_display = DO_DISPLAY_DEFAULT; if (!_cfg.getBool("do_display", _do_display)) { LOG_WRN("Warning! Using default value for do_display (%d).", _do_display); - _cfg.add("do_display", _do_display); + _cfg.add("do_display", _do_display ? "y" : "n"); } _save_raw = SAVE_RAW_DEFAULT; if (!source->isLive() || !_cfg.getBool("save_raw", _save_raw)) { LOG_WRN("Warning! Using default value for save_raw (%d).", _save_raw); - _cfg.add("save_raw", _save_raw); + _cfg.add("save_raw", _save_raw ? "y" : "n"); } _save_debug = SAVE_DEBUG_DEFAULT; if (!_cfg.getBool("save_debug", _save_debug)) { LOG_WRN("Warning! Using default value for save_debug (%d).", _save_debug); - _cfg.add("save_debug", _save_debug); + _cfg.add("save_debug", _save_debug ? "y" : "n"); } if (_save_debug & !_do_display) { - LOG("Forcing do_display = true becase save_debug == true."); + LOG("Forcing do_display = true, because save_debug == true."); _do_display = true; } if (_do_display) { _sphere_view.create(_map_h, _map_w, CV_8UC1); _sphere_view.setTo(Scalar::all(128)); } - if (_save_raw) { - string vid_fn = _base_fn + "-raw.mp4"; - _raw_vid.open(vid_fn, cv::VideoWriter::fourcc('H', '2', '6', '4'), source->getFPS(), cv::Size(source->getWidth(), source->getHeight())); - if (!_raw_vid.isOpened()) { - LOG_ERR("Error! Unable to open raw output video (%s).", vid_fn.c_str()); - _active = false; - return; + + // do video stuff + if (_save_raw || _save_debug) { + // find codec + int fourcc = 0; + string cstr = _cfg("vid_codec"), fext; + for (auto codec : CODECS) { + if (cstr.compare(codec[0]) == 0) { // found the codec + if (cstr.compare("raw") != 0) { // codec isn't RAW + fourcc = VideoWriter::fourcc(codec[1][0], codec[1][1], codec[1][2], codec[1][3]); + } + fext = codec[2]; + } } - } - if (_save_debug) { - string vid_fn = _base_fn + "-debug.mp4"; - _debug_vid.open(vid_fn, cv::VideoWriter::fourcc('H', '2', '6', '4'), source->getFPS(), cv::Size(4 * DRAW_CELL_DIM, 3 * DRAW_CELL_DIM)); - if (!_debug_vid.isOpened()) { - LOG_ERR("Error! Unable to open debug output video (%s).", vid_fn.c_str()); + if (fext.empty()) { + // codec not found - use default + auto codec = CODECS[0]; + cstr = codec[0]; + if (cstr.compare("raw") != 0) { // codec isn't RAW + fourcc = VideoWriter::fourcc(codec[1][0], codec[1][1], codec[1][2], codec[1][3]); + } + fext = codec[2]; + LOG_WRN("Warning!
Using default value for vid_codec (%s).", cstr.c_str()); + _cfg.add("vid_codec", cstr); + } + + // raw input video + if (_save_raw) { + string vid_fn = _base_fn + "-raw-" + exec_time + "." + fext; + double fps = source->getFPS(); + if (fps <= 0) { fps = 25; } + LOG_DBG("Opening %s for video writing (%s %dx%d @ %f FPS)", vid_fn.c_str(), cstr.c_str(), source->getWidth(), source->getHeight(), fps); + _raw_vid.open(vid_fn, fourcc, fps, cv::Size(source->getWidth(), source->getHeight())); + if (!_raw_vid.isOpened()) { + LOG_ERR("Error! Unable to open raw output video (%s).", vid_fn.c_str()); + _active = false; + return; + } + } + + // debug output video + if (_save_debug) { + string vid_fn = _base_fn + "-dbg-" + exec_time + "." + fext; + double fps = source->getFPS(); + if (fps <= 0) { fps = 25; } + LOG_DBG("Opening %s for video writing (%s %dx%d @ %f FPS)", vid_fn.c_str(), cstr.c_str(), 4 * DRAW_CELL_DIM, 3 * DRAW_CELL_DIM, fps); + _debug_vid.open(vid_fn, fourcc, fps, cv::Size(4 * DRAW_CELL_DIM, 3 * DRAW_CELL_DIM)); + if (!_debug_vid.isOpened()) { + LOG_ERR("Error! Unable to open debug output video (%s).", vid_fn.c_str()); + _active = false; + return; + } + } + + // create output file containing log lines corresponding to video frames, for synching video output + string fn = _base_fn + "-vidLogFrames-" + exec_time + ".txt"; + _vid_frames = make_unique<Recorder>(RecorderInterface::RecordType::FILE, fn); + if (!_vid_frames->is_active()) { + LOG_ERR("Error! Unable to open output video frame number log file (%s).", fn.c_str()); _active = false; return; } } + /// Frame source. + _frameGrabber = make_unique<FrameGrabber>( + source, + remapper, + _roi_mask, + thresh_ratio, + thresh_win_pc, + _cfg("thr_rgb_tfrm") + ); + /// Write all parameters back to config file. _cfg.write(); /// Data. - _cnt = 0; - _err = 0; - _intx = _inty = 0; reset(); + // not reset in resetData because they are not affected by heading reset + _data.cnt = 0; + _data.intx = _data.inty = 0; + _err = 0; + /// Thread stuff. _init = true; _active = true; if (_do_display) { - _drawThread = unique_ptr<std::thread>(new std::thread(&Trackball::processDrawQ, this)); + _drawThread = make_unique<std::thread>(&Trackball::processDrawQ, this); } // main processing thread - _thread = unique_ptr<std::thread>(new std::thread(&Trackball::process, this)); + _thread = make_unique<std::thread>(&Trackball::process, this); } /// @@ -453,7 +557,7 @@ Trackball::Trackball(string cfg_fn) /// Trackball::~Trackball() { - LOG("Closing sphere tracker.."); + LOG("Closing sphere tracker"); _init = false; _active = false; @@ -467,6 +571,19 @@ Trackball::~Trackball() } } +/// +/// +/// +void Trackball::resetData() +{ + DATA new_data; + new_data.cnt = _data.cnt; // preserve cnt across resets (but reset seq) + new_data.intx = _data.intx; // can preserve intx/y because they're not affected by heading reset + new_data.inty = _data.inty; + + _data = new_data; +} + /// /// /// @@ -475,33 +592,21 @@ void Trackball::reset() _reset = true; /// Clear maps if we can't search the entire sphere to relocalise. - if (!_global_search) { + if (!_do_global_search) { //FIXME: possible for users to specify sphere_template without enabling global search.. _sphere_template.copyTo(_sphere_map); _clean_map = true; } - /// Reset sphere. - _R_roi = Mat::eye(3, 3, CV_64F); - - /// Reset data. - _seq = 0; // indicates new sequence started - _posx = 0; // reset because heading was lost - _posy = 0; - _heading = 0; - - // test data - _dist = 0; - _ang_dist = 0; - _step_avg = 0; - _step_var = 0; - _evals_avg = 0; + resetData(); /// Drawing.
if (_do_display) { _R_roi_hist.clear(); _pos_heading_hist.clear(); } + + _do_reset = false; } /// @@ -515,7 +620,7 @@ void Trackball::process() if (!SetThreadHighPriority()) { LOG_ERR("Error! Unable to set thread priority!"); } else { - LOG("Set processing thread priority to HIGH!"); + LOG_DBG("Set processing thread priority to HIGH!"); } /// Sphere tracking loop. @@ -524,19 +629,22 @@ void Trackball::process() double t1, t2, t3, t4, t5, t6; double t1avg = 0, t2avg = 0, t3avg = 0, t4avg = 0, t5avg = 0, t6avg = 0; double tfirst = -1, tlast = 0; - while (_active && _frameGrabber->getNextFrameSet(_src_frame, _roi_frame, _ts)) { + while (!_kill && _active && _frameGrabber->getNextFrameSet(_src_frame, _roi_frame, _data.ts, _data.ms)) { t1 = ts_ms(); PRINT(""); - LOG("Frame %d", _cnt); + LOG("Frame %d", _data.cnt); + + /// Handle reset request + if (_do_reset) { + nbad = 0; + reset(); + } /// Localise current view of sphere. - if (!doSearch(_global_search)) { - t2 = ts_ms(); - t3 = ts_ms(); - t4 = ts_ms(); - t5 = ts_ms(); - LOG_ERR("Error! Could not match current sphere orientation to within error threshold (%f).\nNo data will be output for this frame!", _error_thresh); + if (!doSearch(_do_global_search)) { + t2 = t3 = t4 = t5 = ts_ms(); + LOG_WRN("Warning! Could not match current sphere orientation to within error threshold (%f).\nNo data will be output for this frame!", _error_thresh); nbad++; } else { @@ -555,21 +663,21 @@ void Trackball::process() /// Handle failed localisation. if ((_max_bad_frames >= 0) && (nbad > _max_bad_frames)) { - _seq = 0; nbad = 0; reset(); } else { - _seq++; + _data.seq++; } if (_do_display) { - shared_ptr data = shared_ptr(new DrawData()); + auto data = make_shared(); + data->log_frame = _data.cnt; data->src_frame = _src_frame.clone(); data->roi_frame = _roi_frame.clone(); data->sphere_map = _sphere_map.clone(); data->sphere_view = _sphere_view.clone(); - data->dr_roi = _dr_roi; - data->R_roi = _R_roi.clone(); + data->dr_roi = _data.dr_roi; + data->R_roi = _data.R_roi.clone(); data->R_roi_hist = _R_roi_hist; data->pos_heading_hist = _pos_heading_hist; @@ -578,7 +686,7 @@ void Trackball::process() t6 = ts_ms(); /// Timing. - if (_cnt > 0) { // skip first frame (often global search...) + if (_data.cnt > 0) { // skip first frame (often global search...) t1avg += t1 - t0; t2avg += t2 - t1; t3avg += t3 - t2; @@ -587,7 +695,7 @@ void Trackball::process() t6avg += t6 - t5; // opt evals - _evals_avg += _nevals; + _data.evals_avg += _nevals; } LOG("Timing grab/opt/map/plot/log/disp: %.1f / %.1f / %.1f / %.1f / %.1f / %.1f ms", t1 - t0, t2 - t1, t3 - t2, t4 - t3, t5 - t4, t6 - t5); @@ -595,14 +703,14 @@ void Trackball::process() double fps_out = (t6 - prev_t6) > 0 ? 1000 / (t6 - prev_t6) : 0; static double fps_avg = fps_out; fps_avg += 0.25 * (fps_out - fps_avg); - static double prev_ts = _ts; - double fps_in = (_ts - prev_ts) > 0 ? 1000 / (_ts - prev_ts) : 0; - LOG("Average frame rate [curr input / output]: %.1f [%.1f / %.1f] fps", fps_avg, fps_in, fps_out); + static double prev_ts = _data.ts; + double fps_in = (_data.ts - prev_ts) > 0 ? 1000 / (_data.ts - prev_ts) : 0; + LOG("Average frame rate [in/out]: %.1f [%.1f / %.1f] fps", fps_avg, fps_in, fps_out); prev_t6 = t6; - prev_ts = _ts; + prev_ts = _data.ts; /// Always increment frame counter. 
- _cnt++; + _data.cnt++; t0 = ts_ms(); if (tfirst < 0) { tfirst = t0; } @@ -613,16 +721,17 @@ void Trackball::process() _frameGrabber->terminate(); // make sure we've stopped grabbing frames as well - if (_cnt > 1) { - PRINT(""); - LOG("Trackball timing"); + if (_data.cnt > 1) { + PRINT("\n----------------------------------------------------------------------------"); + LOG("Trackball timing:"); LOG("Average grab/opt/map/plot/log/disp time: %.1f / %.1f / %.1f / %.1f / %.1f / %.1f ms", - t1avg / (_cnt - 1), t2avg / (_cnt - 1), t3avg / (_cnt - 1), t4avg / (_cnt - 1), t5avg / (_cnt - 1), t6avg / (_cnt - 1)); - LOG("Average fps: %.2f", 1000. * (_cnt - 1) / (tlast - tfirst)); + t1avg / (_data.cnt - 1), t2avg / (_data.cnt - 1), t3avg / (_data.cnt - 1), t4avg / (_data.cnt - 1), t5avg / (_data.cnt - 1), t6avg / (_data.cnt - 1)); + LOG("Average fps: %.2f", 1000. * (_data.cnt - 1) / (tlast - tfirst)); PRINT(""); - LOG("Optimiser test data."); - LOG("Average number evals / frame: %.1f", _evals_avg / (_cnt - 1)); + LOG("Optimiser test data:"); + LOG("Average number evals / frame: %.1f", _data.evals_avg / (_data.cnt - 1)); + PRINT("----------------------------------------------------------------------------"); } _active = false; @@ -640,12 +749,12 @@ bool Trackball::doSearch(bool allow_global = false) /// Run optimisation and save result. _nevals = 0; if (!_reset) { - _dr_roi = guess; - _err = _localOpt->search(_roi_frame, _R_roi, _dr_roi); // _dr_roi contains optimal rotation + _data.dr_roi = guess; + _err = _localOpt->search(_roi_frame, _data.R_roi, _data.dr_roi); // _dr_roi contains optimal rotation _nevals = _localOpt->getNumEval(); } else { - _dr_roi = CmPoint64f(0, 0, 0); + _data.dr_roi = CmPoint64f(0, 0, 0); _err = 0; } @@ -653,34 +762,34 @@ bool Trackball::doSearch(bool allow_global = false) bool bad_frame = _error_thresh >= 0 ? (_err > _error_thresh) : false; if (allow_global && (bad_frame || (_reset && !_clean_map))) { - LOG("Doing global search.."); + LOG("Doing global search"); // do global search - _err = _globalOpt->search(_roi_frame, _R_roi, _r_roi); // use last know orientation, _r_roi, as guess and update with result + _err = _globalOpt->search(_roi_frame, _data.R_roi, _data.r_roi); // use last know orientation, _r_roi, as guess and update with result _nevals = _globalOpt->getNumEval(); bad_frame = _error_thresh >= 0 ? (_err > _error_thresh) : false; // if global search failed as well, just reset global orientation too if (bad_frame) { - _r_roi = CmPoint64f(0, 0, 0); // zero absolute orientation + _data.r_roi = CmPoint64f(0, 0, 0); // zero absolute orientation } // reset sphere to found orientation with zero motion - _dr_roi = CmPoint64f(0, 0, 0); // zero relative rotation - _R_roi = CmPoint64f::omegaToMatrix(_r_roi); + _data.dr_roi = CmPoint64f(0, 0, 0); // zero relative rotation + _data.R_roi = CmPoint64f::omegaToMatrix(_data.r_roi); } else { /// Accumulate sphere orientation. 
- Mat tmpR = CmPoint64f::omegaToMatrix(_dr_roi); // relative rotation (angle-axis) in ROI frame - _R_roi = tmpR * _R_roi; // pre-multiply to accumulate orientation matrix - _r_roi = CmPoint64f::matrixToOmega(_R_roi); + Mat tmpR = CmPoint64f::omegaToMatrix(_data.dr_roi); // relative rotation (angle-axis) in ROI frame + _data.R_roi = tmpR * _data.R_roi; // pre-multiply to accumulate orientation matrix + _data.r_roi = CmPoint64f::matrixToOmega(_data.R_roi); } - LOG("optimum sphere rotation:\t%.3f %.3f %.3f (err=%.3e/its=%d)", _dr_roi[0], _dr_roi[1], _dr_roi[2], _err, _nevals); - LOG_DBG("Current sphere orientation:\t%.3f %.3f %.3f", _r_roi[0], _r_roi[1], _r_roi[2]); + LOG("optimum sphere rotation:\t%.3f %.3f %.3f (err=%.3e/its=%d)", _data.dr_roi[0], _data.dr_roi[1], _data.dr_roi[2], _err, _nevals); + LOG_DBG("Current sphere orientation:\t%.3f %.3f %.3f", _data.r_roi[0], _data.r_roi[1], _data.r_roi[2]); if (!bad_frame) { - guess = 0.9 * _dr_roi + 0.1 * guess; + guess = 0.9 * _data.dr_roi + 0.1 * guess; } else { guess = CmPoint64f(0,0,0); } @@ -693,7 +802,7 @@ bool Trackball::doSearch(bool allow_global = false) /// void Trackball::updateSphere() { - double* m = reinterpret_cast(_R_roi.data); // absolute orientation (3d mat) in ROI frame + double* m = reinterpret_cast(_data.R_roi.data); // absolute orientation (3d mat) in ROI frame if (_do_display) { _sphere_view.setTo(Scalar::all(128)); @@ -710,7 +819,7 @@ void Trackball::updateSphere() cnt++; // rotate point about rotation axis (sphere coords) - double* v = &_p1s_lut[(i * _roi_w + j) * 3]; + double* v = &(*_p1s_lut)[(i * _roi_w + j) * 3]; //p2s[0] = m[0] * v[0] + m[1] * v[1] + m[2] * v[2]; //p2s[1] = m[3] * v[0] + m[4] * v[1] + m[5] * v[2]; //p2s[2] = m[6] * v[0] + m[7] * v[1] + m[8] * v[2]; @@ -762,25 +871,25 @@ void Trackball::updatePath() // _R_roi // abs vec roi - _r_roi = CmPoint64f::matrixToOmega(_R_roi); + _data.r_roi = CmPoint64f::matrixToOmega(_data.R_roi); // rel vec cam - _dr_cam = _dr_roi.getTransformed(_roi_to_cam_R); + _data.dr_cam = _data.dr_roi/*.getTransformed(_roi_to_cam_R)*/; // abs mat cam - _R_cam = _roi_to_cam_R * _R_roi; + _data.R_cam = /*_roi_to_cam_R * */_data.R_roi; // abs vec cam - _r_cam = CmPoint64f::matrixToOmega(_R_cam); + _data.r_cam = CmPoint64f::matrixToOmega(_data.R_cam); // rel vec world - _dr_lab = _dr_cam.getTransformed(_cam_to_lab_R); + _data.dr_lab = _data.dr_cam.getTransformed(_cam_to_lab_R); // abs mat world - _R_lab = _cam_to_lab_R * _R_cam; + _data.R_lab = _cam_to_lab_R * _data.R_cam; // abs vec world - _r_lab = CmPoint64f::matrixToOmega(_R_lab); + _data.r_lab = CmPoint64f::matrixToOmega(_data.R_lab); //// store initial rotation from template (if any) @@ -808,64 +917,64 @@ void Trackball::updatePath() // running speed, radians/frame (-ve rotation around x-axis causes y-axis translation & vice-versa!!) 
- _velx = _dr_lab[1]; - _vely = -_dr_lab[0]; - _step_mag = sqrt(_velx * _velx + _vely * _vely); // magnitude (radians) of ball rotation excluding turning (change in heading) + _data.velx = _data.dr_lab[1]; + _data.vely = -_data.dr_lab[0]; + _data.step_mag = sqrt(_data.velx * _data.velx + _data.vely * _data.vely); // magnitude (radians) of ball rotation excluding turning (change in heading) // test data - if (_cnt > 0) { - _dist += _step_mag; - double v = _dr_lab.len(); - double delta = v - _step_avg; - _step_avg += delta / static_cast(_cnt); // running average - double delta2 = v - _step_avg; - _step_var += delta * delta2; // running variance (Welford's alg) + if (_data.cnt > 0) { + _data.dist += _data.step_mag; + double v = _data.dr_lab.len(); + double delta = v - _data.step_avg; + _data.step_avg += delta / static_cast(_data.cnt); // running average + double delta2 = v - _data.step_avg; + _data.step_var += delta * delta2; // running variance (Welford's alg) } // running direction - _step_dir = atan2(_vely, _velx); - if (_step_dir < 0) { _step_dir += 360 * CM_D2R; } + _data.step_dir = atan2(_data.vely, _data.velx); + if (_data.step_dir < 0) { _data.step_dir += 360 * CM_D2R; } // integrated x/y pos (optical mouse style) - _intx += _velx; - _inty += _vely; + _data.intx += _data.velx; + _data.inty += _data.vely; // integrate bee heading - _heading -= _dr_lab[2]; - while (_heading < 0) { _heading += 360 * CM_D2R; } - while (_heading >= 360 * CM_D2R) { _heading -= 360 * CM_D2R; } - _ang_dist += abs(_dr_lab[2]); + _data.heading -= _data.dr_lab[2]; + while (_data.heading < 0) { _data.heading += 360 * CM_D2R; } + while (_data.heading >= 360 * CM_D2R) { _data.heading -= 360 * CM_D2R; } + _data.ang_dist += abs(_data.dr_lab[2]); // integrate 2d position { const int steps = 4; // increasing this doesn't help much - double step = _step_mag / steps; + double step = _data.step_mag / steps; static double prev_heading = 0; if (_reset) { prev_heading = 0; } - double heading_step = (_heading - prev_heading); + double heading_step = (_data.heading - prev_heading); while (heading_step >= 180 * CM_D2R) { heading_step -= 360 * CM_D2R; } while (heading_step < -180 * CM_D2R) { heading_step += 360 * CM_D2R; } heading_step /= steps; // do after wrapping above // super-res integration - CmPoint64f dir(_velx, _vely, 0); + CmPoint64f dir(_data.velx, _data.vely, 0); dir.normalise(); dir.rotateAboutNorm(CmPoint(0, 0, 1), prev_heading + heading_step / 2.0); for (int i = 0; i < steps; i++) { - _posx += step * dir[0]; - _posy += step * dir[1]; + _data.posx += step * dir[0]; + _data.posy += step * dir[1]; dir.rotateAboutNorm(CmPoint(0, 0, 1), heading_step); } - prev_heading = _heading; + prev_heading = _data.heading; } if (_do_display) { // update pos hist (in ROI-space!) 
- _R_roi_hist.push_back(_R_roi.clone()); + _R_roi_hist.push_back(_data.R_roi.clone()); while (_R_roi_hist.size() > DRAW_SPHERE_HIST_LENGTH) { _R_roi_hist.pop_front(); } - _pos_heading_hist.push_back(CmPoint(_posx, _posy, _heading)); + _pos_heading_hist.push_back(CmPoint(_data.posx, _data.posy, _data.heading)); while (_pos_heading_hist.size() > DRAW_FICTIVE_PATH_LENGTH) { _pos_heading_hist.pop_front(); } @@ -880,30 +989,42 @@ bool Trackball::logData() std::stringstream ss; ss.precision(14); + static double prev_ts = _data.ts; + // frame_count - ss << _cnt << ", "; + ss << _data.cnt << ", "; // rel_vec_cam[3] | error - ss << _dr_cam[0] << ", " << _dr_cam[1] << ", " << _dr_cam[2] << ", " << _err << ", "; + ss << _data.dr_cam[0] << ", " << _data.dr_cam[1] << ", " << _data.dr_cam[2] << ", " << _err << ", "; // rel_vec_world[3] - ss << _dr_lab[0] << ", " << _dr_lab[1] << ", " << _dr_lab[2] << ", "; + ss << _data.dr_lab[0] << ", " << _data.dr_lab[1] << ", " << _data.dr_lab[2] << ", "; // abs_vec_cam[3] - ss << _r_cam[0] << ", " << _r_cam[1] << ", " << _r_cam[2] << ", "; + ss << _data.r_cam[0] << ", " << _data.r_cam[1] << ", " << _data.r_cam[2] << ", "; // abs_vec_world[3] - ss << _r_lab[0] << ", " << _r_lab[1] << ", " << _r_lab[2] << ", "; + ss << _data.r_lab[0] << ", " << _data.r_lab[1] << ", " << _data.r_lab[2] << ", "; // integrated xpos | integrated ypos | integrated heading - ss << _posx << ", " << _posy << ", " << _heading << ", "; + ss << _data.posx << ", " << _data.posy << ", " << _data.heading << ", "; // direction (radians) | speed (radians/frame) - ss << _step_dir << ", " << _step_mag << ", "; + ss << _data.step_dir << ", " << _data.step_mag << ", "; // integrated x movement | integrated y movement (mouse output equivalent) - ss << _intx << ", " << _inty << ", "; - // timestamp | sequence number - ss << _ts << ", " << _seq << std::endl; + ss << _data.intx << ", " << _data.inty << ", "; + // timestamp (ms since midnight) | sequence number | delta ts (ms since last frame) + ss << _data.ms << ", " << _data.seq << ", " << (_data.ts - prev_ts) << std::endl; + + prev_ts = _data.ts; // caution - be sure that this time delta corresponds to deltas for step size, rotation rate, etc!! 
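Side note on the record assembled above: a rough sketch (not part of the patch) of how a downstream consumer might split one comma-separated output line; the column order follows the field comments in logData(), while the file name and the stream source are illustrative assumptions only.

    #include <fstream>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    int main()
    {
        std::ifstream dat("fictrac-20230101_120000.dat");   // hypothetical output file name
        std::string line;
        while (std::getline(dat, line)) {
            std::vector<std::string> cols;
            std::stringstream ls(line);
            for (std::string tok; std::getline(ls, tok, ',');) {
                cols.push_back(tok);    // tokens keep the leading space from the ", " separator
            }
            if (cols.empty()) { continue; }
            // cols[0] = frame counter, last column = delta timestamp (ms since previous frame)
            std::cout << "frame " << cols[0] << " has " << cols.size() << " columns\n";
        }
        return 0;
    }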
if(_do_socket) _socket->addMsg(ss.str()); // async i/o - return _log->addMsg(ss.str()); + bool ret = true; + if (_do_sock_output) { + ret &= _data_sock->addMsg("FT, " + ss.str()); + } + if (_do_com_output) { + ret &= _data_com->addMsg("FT, " + ss.str()); + } + ret &= _data_log->addMsg(ss.str()); + return ret; } /// @@ -913,9 +1034,9 @@ double Trackball::testRotation(const double x[3]) { static double lmat[9]; CmPoint64f tmp(x[0], x[1], x[2]); - tmp.omegaToMatrix(lmat); // relative rotation in camera frame - double* rmat = (double*)_R_roi.data; // pre-multiply to orientation matrix - static double m[9]; // absolute orientation in camera frame + tmp.omegaToMatrix(lmat); // relative rotation in camera frame + double* rmat = (double*)_data.R_roi.data; // pre-multiply to orientation matrix + static double m[9]; // absolute orientation in camera frame m[0] = lmat[0] * rmat[0] + lmat[1] * rmat[3] + lmat[2] * rmat[6]; m[1] = lmat[0] * rmat[1] + lmat[1] * rmat[4] + lmat[2] * rmat[7]; @@ -949,7 +1070,7 @@ double Trackball::testRotation(const double x[3]) for (int i = 0; i < _roi_h; i++) { uint8_t* pmask = _roi_mask.ptr(i); uint8_t* proi = _roi_frame.ptr(i); - double* v = &_p1s_lut[i * _roi_w * 3]; + double* v = &(*_p1s_lut)[i * _roi_w * 3]; for (int j = 0; j < _roi_w; j++) { if (pmask[j] < 255) { continue; } cnt++; @@ -1093,7 +1214,7 @@ void Trackball::processDrawQ() if (!_active) { break; } /// Retrieve data. - shared_ptr data = _drawQ.back(); + auto data = _drawQ.back(); /// Clear all other frames (only draw latest available). if (_drawQ.size() > 1) { @@ -1130,6 +1251,7 @@ void Trackball::drawCanvas(shared_ptr data) Mat& sphere_map = data->sphere_map; deque& R_roi_hist = data->R_roi_hist; deque& pos_heading_hist = data->pos_heading_hist; + unsigned int log_frame = data->log_frame; /// Draw source image. double radPerPix = _sphere_rad * 3.0 / (2 * DRAW_CELL_DIM); @@ -1163,74 +1285,76 @@ void Trackball::drawCanvas(shared_ptr data) static Mat resize_roi(DRAW_CELL_DIM, DRAW_CELL_DIM, CV_8UC1); cv::resize(roi_frame, resize_roi, resize_roi.size()); Mat draw_roi = canvas(Rect(2 * DRAW_CELL_DIM, 0, DRAW_CELL_DIM, DRAW_CELL_DIM)); - cv::cvtColor(resize_roi, draw_roi, CV_GRAY2BGR); + cv::cvtColor(resize_roi, draw_roi, cv::COLOR_GRAY2BGR); /// Draw warped diff ROI. static Mat resize_diff(DRAW_CELL_DIM, DRAW_CELL_DIM, CV_8UC1); cv::resize(diff_roi, resize_diff, resize_diff.size()); Mat draw_diff = canvas(Rect(3 * DRAW_CELL_DIM, 0, DRAW_CELL_DIM, DRAW_CELL_DIM)); - cv::cvtColor(resize_diff, draw_diff, CV_GRAY2BGR); + cv::cvtColor(resize_diff, draw_diff, cv::COLOR_GRAY2BGR); /// Draw current sphere view. static Mat resize_view(DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, CV_8UC1); cv::resize(sphere_view, resize_view, resize_view.size()); Mat draw_view = canvas(Rect(2 * DRAW_CELL_DIM, 1 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM)); - cv::cvtColor(resize_view, draw_view, CV_GRAY2BGR); + cv::cvtColor(resize_view, draw_view, cv::COLOR_GRAY2BGR); /// Draw current sphere map. static Mat resize_map(DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, CV_8UC1); cv::resize(sphere_map, resize_map, resize_map.size()); Mat draw_map = canvas(Rect(2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM)); - cv::cvtColor(resize_map, draw_map, CV_GRAY2BGR); + cv::cvtColor(resize_map, draw_map, cv::COLOR_GRAY2BGR); /// Draw fictive path. 
@@ -949,7 +1070,7 @@ double Trackball::testRotation(const double x[3])
     for (int i = 0; i < _roi_h; i++) {
         uint8_t* pmask = _roi_mask.ptr(i);
         uint8_t* proi = _roi_frame.ptr(i);
-        double* v = &_p1s_lut[i * _roi_w * 3];
+        double* v = &(*_p1s_lut)[i * _roi_w * 3];
         for (int j = 0; j < _roi_w; j++) {
             if (pmask[j] < 255) { continue; }
             cnt++;
@@ -1093,7 +1214,7 @@ void Trackball::processDrawQ()
         if (!_active) { break; }
 
         /// Retrieve data.
-        shared_ptr data = _drawQ.back();
+        auto data = _drawQ.back();
 
         /// Clear all other frames (only draw latest available).
         if (_drawQ.size() > 1) {
@@ -1130,6 +1251,7 @@ void Trackball::drawCanvas(shared_ptr data)
     Mat& sphere_map = data->sphere_map;
     deque& R_roi_hist = data->R_roi_hist;
     deque& pos_heading_hist = data->pos_heading_hist;
+    unsigned int log_frame = data->log_frame;
 
     /// Draw source image.
     double radPerPix = _sphere_rad * 3.0 / (2 * DRAW_CELL_DIM);
@@ -1163,74 +1285,76 @@ void Trackball::drawCanvas(shared_ptr data)
     static Mat resize_roi(DRAW_CELL_DIM, DRAW_CELL_DIM, CV_8UC1);
     cv::resize(roi_frame, resize_roi, resize_roi.size());
     Mat draw_roi = canvas(Rect(2 * DRAW_CELL_DIM, 0, DRAW_CELL_DIM, DRAW_CELL_DIM));
-    cv::cvtColor(resize_roi, draw_roi, CV_GRAY2BGR);
+    cv::cvtColor(resize_roi, draw_roi, cv::COLOR_GRAY2BGR);
 
     /// Draw warped diff ROI.
     static Mat resize_diff(DRAW_CELL_DIM, DRAW_CELL_DIM, CV_8UC1);
     cv::resize(diff_roi, resize_diff, resize_diff.size());
     Mat draw_diff = canvas(Rect(3 * DRAW_CELL_DIM, 0, DRAW_CELL_DIM, DRAW_CELL_DIM));
-    cv::cvtColor(resize_diff, draw_diff, CV_GRAY2BGR);
+    cv::cvtColor(resize_diff, draw_diff, cv::COLOR_GRAY2BGR);
 
     /// Draw current sphere view.
     static Mat resize_view(DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, CV_8UC1);
     cv::resize(sphere_view, resize_view, resize_view.size());
     Mat draw_view = canvas(Rect(2 * DRAW_CELL_DIM, 1 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM));
-    cv::cvtColor(resize_view, draw_view, CV_GRAY2BGR);
+    cv::cvtColor(resize_view, draw_view, cv::COLOR_GRAY2BGR);
 
     /// Draw current sphere map.
     static Mat resize_map(DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, CV_8UC1);
     cv::resize(sphere_map, resize_map, resize_map.size());
     Mat draw_map = canvas(Rect(2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM));
-    cv::cvtColor(resize_map, draw_map, CV_GRAY2BGR);
+    cv::cvtColor(resize_map, draw_map, cv::COLOR_GRAY2BGR);
 
     /// Draw fictive path.
     //FIXME: add heading arrow to fictive path
     {
         int npts = pos_heading_hist.size();
-        double minx = 0, maxx = 0, miny = 0, maxy = 0;
-        for (int i = 0; i < npts; i++) {
-            double x = pos_heading_hist[i].x, y = pos_heading_hist[i].y;
-            if (x < minx)
-                minx = x;
-            if (x > maxx)
-                maxx = x;
-            if (y < miny)
-                miny = y;
-            if (y > maxy)
-                maxy = y;
-        }
-        double scl = 1;
-        if (npts > 1) {
-            double sclx = double(DRAW_CELL_DIM - 8) / (2.0 * std::max(fabs(minx), fabs(maxx)));
-            double scly = double(DRAW_CELL_DIM - 4) / std::max(fabs(miny), fabs(maxy));
-            scl = std::min(sclx, scly);
-        }
+        if (npts > 0) {
+            double minx = DBL_MAX, maxx = -DBL_MAX, miny = DBL_MAX, maxy = -DBL_MAX;
+            for (auto p : pos_heading_hist) {
+                double x = p.x, y = p.y;
+                if (x < minx)
+                    minx = x;
+                if (x > maxx)
+                    maxx = x;
+                if (y < miny)
+                    miny = y;
+                if (y > maxy)
+                    maxy = y;
+            }
+            double scl = 1;
+            if (npts > 1) {
+                double sclx = (minx != maxx) ? double(DRAW_CELL_DIM - 8) / (maxx - minx) : 1;
+                double scly = (miny != maxy) ? double(2 * DRAW_CELL_DIM - 4) / (maxy - miny) : 1;
+                scl = std::min(sclx, scly);
+            }
 
-        Mat draw_path = canvas(Rect(0, 2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM));
-        double cx = DRAW_CELL_DIM, cy = 0.5 * DRAW_CELL_DIM;
-        double ppx = cx, ppy = cy;
-        for (int i = 0; i < npts; i++) {
-            double px = cx + scl * pos_heading_hist[i].y, py = cy - scl * pos_heading_hist[i].x;
-            cv::line(draw_path,
-                cv::Point(static_cast<int>(round(ppx * 16)), static_cast<int>(round(ppy * 16))),
-                cv::Point(static_cast<int>(round(px * 16)), static_cast<int>(round(py * 16))),
-                CV_RGB(255, 255, 255), 1, CV_AA, 4);
-            ppx = px;
-            ppy = py;
+            Mat draw_path = canvas(Rect(0, 2 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM, DRAW_CELL_DIM));
+            double my = (2 * DRAW_CELL_DIM - scl * (maxy - miny)) / 2, mx = DRAW_CELL_DIM - (DRAW_CELL_DIM - scl * (maxx - minx)) / 2;   // zeroth pixel for y/x data axes
+            double ppx = mx - scl * (pos_heading_hist[0].x - minx), ppy = my + scl * (pos_heading_hist[0].y - miny);
+            for (int i = 1; i < npts; i++) {
+                double px = mx - scl * (pos_heading_hist[i].x - minx), py = my + scl * (pos_heading_hist[i].y - miny);
+                cv::line(draw_path,
+                    cv::Point(static_cast<int>(round(ppy * 16)), static_cast<int>(round(ppx * 16))),
+                    cv::Point(static_cast<int>(round(py * 16)), static_cast<int>(round(px * 16))),
+                    CV_RGB(255, 255, 255), 1, cv::LINE_AA, 4);
+                ppx = px;
+                ppy = py;
+            }
         }
     }
 
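The replacement block changes the path scaling from a fixed, origin-centred scale to fitting the path's bounding box into the 2x1 draw cell and centring it (data x runs up the cell, hence the sign flip). The mapping in isolation, as a sketch with illustrative names; the real code works inline on `pos_heading_hist`:

```cpp
#include <algorithm>

// Sketch of the fit-and-centre mapping used above: scale the path's bounding box to a
// W x H cell (minus a small margin), preserving aspect ratio, then centre it.
struct CellFit { double scl, mx, my; };

CellFit fitPathToCell(double minx, double maxx, double miny, double maxy, int W, int H)
{
    double sclx = (minx != maxx) ? double(H - 8) / (maxx - minx) : 1;   // data x -> vertical axis
    double scly = (miny != maxy) ? double(W - 4) / (maxy - miny) : 1;   // data y -> horizontal axis
    CellFit f;
    f.scl = std::min(sclx, scly);
    f.my = (W - f.scl * (maxy - miny)) / 2;         // zeroth pixel of the y data axis
    f.mx = H - (H - f.scl * (maxx - minx)) / 2;     // zeroth pixel of the x data axis (flipped)
    return f;
}
// pixel row = f.mx - f.scl * (x - minx);  pixel col = f.my + f.scl * (y - miny)
```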
     /// Draw sphere orientation history (animal position history on sphere).
     {
         static const CmPoint up(0, 0, -1.0);
-        static CmPoint up_roi = up.getTransformed(_roi_to_cam_R.t() * _cam_to_lab_R.t()).getNormalised() * _r_d_ratio;
+        CmPoint up_roi = up.getTransformed(/*_roi_to_cam_R.t() * */_cam_to_lab_R.t()).getNormalised() * _r_d_ratio;
         double ppx = -1, ppy = -1;
-        draw_camera->vectorToPixelIndex(up_roi, ppx, ppy);
+        draw_camera->vectorToPixelIndex(up_roi, ppx, ppy);  // don't need to correct for roi2cam R because origin is implicitly centre of draw_camera image anyway
         for (int i = R_roi_hist.size() - 1; i >= 0; i--) {
             // multiply by transpose - see Localiser::testRotation()
             CmPoint vec = up_roi.getTransformed(R_roi * R_roi_hist[i].t()).getNormalised() * _r_d_ratio;
-
+
             // sphere is centred at (0,0,1) cam coords, with r
             double px = -1, py = -1;
@@ -1240,8 +1364,8 @@ void Trackball::drawCanvas(shared_ptr data)
             draw_camera->vectorToPixelIndex(vec, px, py);
 
             // draw link
-            if ((ppx >= 0) && (ppy >= 0) && (px >= 0) && (py >= 0) && (ppx < draw_input.cols) && (ppy > draw_input.rows) && (px < draw_input.cols) && (py < draw_input.rows)) {
-                float mix = (i + 0.5f) / static_cast<float>(R_roi_hist.size());
+            if ((ppx >= 0) && (ppy >= 0) && (px >= 0) && (py >= 0) && (ppx < draw_input.cols) && (ppy < draw_input.rows) && (px < draw_input.cols) && (py < draw_input.rows)) {
+                float mix = 0.33f + 0.67f * (i + 0.5f) / static_cast<float>(R_roi_hist.size());
                 cv::Vec3b rgb = draw_input.at<cv::Vec3b>(static_cast<int>((ppy + py) / 2.f), static_cast<int>((ppx + px) / 2.f));   // px/py are pixel index values
                 int b = static_cast<int>((1 - mix) * rgb[0] + mix * 255.f + 0.5f);
                 int g = static_cast<int>((1 - mix) * rgb[1] + mix * 255.f + 0.5f);
@@ -1250,7 +1374,7 @@ void Trackball::drawCanvas(shared_ptr data)
                 cv::line(draw_input,
                     cv::Point(static_cast<int>(round(px * 16)), static_cast<int>(round(py * 16))),
                     cv::Point(static_cast<int>(round(ppx * 16)), static_cast<int>(round(ppy * 16))),
-                    CV_RGB(r, g, b), 1, CV_AA, 4);
+                    CV_RGB(r, g, b), 1, cv::LINE_AA, 4);
             }
         }
         ppx = px;
@@ -1262,19 +1386,19 @@ void Trackball::drawCanvas(shared_ptr data)
     cv::line(canvas,
         cv::Point(2 * DRAW_CELL_DIM, 0 * DRAW_CELL_DIM) * 16,
         cv::Point(2 * DRAW_CELL_DIM, 3 * DRAW_CELL_DIM) * 16,
-        CV_RGB(255, 255, 255), 2, CV_AA, 4);
+        CV_RGB(255, 255, 255), 2, cv::LINE_AA, 4);
     cv::line(canvas,
         cv::Point(0 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM) * 16,
         cv::Point(4 * DRAW_CELL_DIM, 2 * DRAW_CELL_DIM) * 16,
-        CV_RGB(255, 255, 255), 2, CV_AA, 4);
+        CV_RGB(255, 255, 255), 2, cv::LINE_AA, 4);
     cv::line(canvas,
        cv::Point(3 * DRAW_CELL_DIM, 0 * DRAW_CELL_DIM) * 16,
        cv::Point(3 * DRAW_CELL_DIM, 1 * DRAW_CELL_DIM) * 16,
-       CV_RGB(255, 255, 255), 2, CV_AA, 4);
+       CV_RGB(255, 255, 255), 2, cv::LINE_AA, 4);
     cv::line(canvas,
        cv::Point(2 * DRAW_CELL_DIM, 1 * DRAW_CELL_DIM) * 16,
        cv::Point(4 * DRAW_CELL_DIM, 1 * DRAW_CELL_DIM) * 16,
-       CV_RGB(255, 255, 255), 2, CV_AA, 4);
+       CV_RGB(255, 255, 255), 2, cv::LINE_AA, 4);
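All of these calls keep the existing fixed-point drawing idiom: endpoints are pre-multiplied by 16 and the final argument of `cv::line` is the `shift` parameter (4), so OpenCV treats the coordinates as having 4 fractional bits and renders anti-aliased lines at sub-pixel positions. A self-contained illustration of the idiom (hypothetical helper, not part of the patch):

```cpp
#include <opencv2/imgproc.hpp>

// Draw an anti-aliased line at sub-pixel endpoints: scale by 2^shift (16) and pass shift = 4.
void subpixelLine(cv::Mat& img, cv::Point2d a, cv::Point2d b, const cv::Scalar& colour)
{
    const int shift = 4;    // 4 fractional bits -> coordinates scaled by 16
    cv::line(img,
             cv::Point(cvRound(a.x * 16), cvRound(a.y * 16)),
             cv::Point(cvRound(b.x * 16), cvRound(b.y * 16)),
             colour, 1, cv::LINE_AA, shift);
}
```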
 
     /// Draw text (with shadow).
     shadowText(canvas, string("Processed ") + dateString(),
@@ -1306,8 +1430,12 @@ void Trackball::drawCanvas(shared_ptr data)
     cv::imshow("FicTrac-debug", canvas);
     uint16_t key = cv::waitKey(1);
     if (key == 0x1B) {  // esc
-        LOG("Exiting..");
-        _active = false;
+        LOG("Exiting");
+        terminate();
+    }
+    else if (key == 0x52) {     // shift+R
+        LOG("Resetting map!");
+        _do_reset = true;
     }
 
     if (_save_raw) {
@@ -1316,21 +1444,39 @@ void Trackball::drawCanvas(shared_ptr data)
     if (_save_debug) {
         _debug_vid.write(canvas);
     }
+    if (_save_raw || _save_debug) {
+        _vid_frames->addMsg(to_string(log_frame) + "\n");
+    }
+}
+
+///
+///
+///
+shared_ptr Trackball::getState()
+{
+    return make_shared(_data);
 }
 
-void Trackball::printState()
+///
+///
+///
+void Trackball::dumpState()
 {
-    PRINT("");
+    PRINT("\n----------------------------------------------------------------------");
     PRINT("Trackball state");
-    PRINT("Sphere orientation (cam): %f %f %f", _r_cam[0], _r_cam[1], _r_cam[2]);
-    PRINT("Total heading rotation: %f deg", _ang_dist * CM_R2D);
-    PRINT("Heading direction: %f deg (%f %% total heading rotation)", _heading * CM_R2D, _heading * 100. / _ang_dist);
-    PRINT("Accumulated X/Y motion: %f / %f rad (%f / %f * 2pi)", _intx, _inty, _intx / (2 * CM_PI), _inty / (2 * CM_PI));
-    PRINT("Distance travelled: %f rad (%f * 2pi)", _dist, _dist / (2 * CM_PI));
-    PRINT("Integrated X/Y position: (%.3e, %.3e) rad (%f / %f %% total path length)", _posx, _posy, _posx * 100. / _dist, _posy * 100. / _dist);
-    PRINT("Average/stdev rotation: %.3e / %.3e rad/frame", _step_avg, sqrt(_step_var / _cnt));  // population variance
+    PRINT("Sphere orientation (cam): %f %f %f", _data.r_cam[0], _data.r_cam[1], _data.r_cam[2]);
+    PRINT("Total heading rotation: %f deg", _data.ang_dist * CM_R2D);
+    PRINT("Heading direction: %f deg (%f %% total heading rotation)", _data.heading * CM_R2D, _data.heading * 100. / _data.ang_dist);
+    PRINT("Accumulated X/Y motion: %f / %f rad (%f / %f * 2pi)", _data.intx, _data.inty, _data.intx / (2 * CM_PI), _data.inty / (2 * CM_PI));
+    PRINT("Distance travelled: %f rad (%f * 2pi)", _data.dist, _data.dist / (2 * CM_PI));
+    PRINT("Integrated X/Y position: (%.3e, %.3e) rad (%f / %f %% total path length)", _data.posx, _data.posy, _data.posx * 100. / _data.dist, _data.posy * 100. / _data.dist);
+    PRINT("Average/stdev rotation: %.3e / %.3e rad/frame", _data.step_avg, sqrt(_data.step_var / _data.cnt));   // population variance
+    PRINT("\n----------------------------------------------------------------------");
 }
 
+///
+///
+///
 bool Trackball::writeTemplate(std::string fn)
 {
     if (!_init) { return false; }
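`getState()` returns a shared pointer to a copy of the internal `_data` block, so a caller gets a consistent snapshot of the current pose without holding any lock while reading it. A hypothetical caller; the header name and the `Trackball` instance are assumptions, and the pointee type is whatever `_data` is declared as:

```cpp
#include <cstdio>
#include "trackball.h"  // assumed header declaring the Trackball class patched above

// Illustrative only: take a snapshot and read fields that also appear in the log record.
void printPose(Trackball& tracker)
{
    auto state = tracker.getState();    // shared_ptr to a copy of _data
    if (state) {
        std::printf("pos = (%.3f, %.3f) rad, heading = %.3f rad\n",
                    state->posx, state->posy, state->heading);
    }
}
```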
diff --git a/src/drawing.cpp b/src/drawing.cpp
index f6278c8..e7f2137 100644
--- a/src/drawing.cpp
+++ b/src/drawing.cpp
@@ -68,10 +68,10 @@ void drawCircle(cv::Mat& img, shared_ptr> circ_pts, const cv
 
     /// Draw lines between circumference points.
     Point2d p1 = circ_pts->front(), p2;
-    for (int i = 1; i < circ_pts->size(); i++) {
+    for (unsigned int i = 1; i < circ_pts->size(); i++) {
         /// Draw dashed/solid.
         p2 = (*circ_pts)[i];
-        if (solid || (i % 2)) { cv::line(img, 4 * p1, 4 * p2, colour, 2, CV_AA, 2); }
+        if (solid || (i % 2)) { cv::line(img, 4 * p1, 4 * p2, colour, 2, cv::LINE_AA, 2); }
         p1 = p2;
     }
 }
@@ -128,23 +128,25 @@ void drawCursor(Mat& rgb, const Point2d& pt, cv::Scalar colour)
     const int inner_rad = std::max(int(rgb.cols/500+0.5), 2);
     const int outer_rad = std::max(int(rgb.cols/150+0.5), 5);
 
-    cv::line(rgb, pt-Point2d(outer_rad,outer_rad), pt-Point2d(inner_rad,inner_rad), colour, 1, CV_AA);
-    cv::line(rgb, pt+Point2d(inner_rad,inner_rad), pt+Point2d(outer_rad,outer_rad), colour, 1, CV_AA);
-    cv::line(rgb, pt-Point2d(-outer_rad,outer_rad), pt-Point2d(-inner_rad,inner_rad), colour, 1, CV_AA);
-    cv::line(rgb, pt+Point2d(-inner_rad,inner_rad), pt+Point2d(-outer_rad,outer_rad), colour, 1, CV_AA);
+    cv::line(rgb, pt-Point2d(outer_rad,outer_rad), pt-Point2d(inner_rad,inner_rad), colour, 1, cv::LINE_AA);
+    cv::line(rgb, pt+Point2d(inner_rad,inner_rad), pt+Point2d(outer_rad,outer_rad), colour, 1, cv::LINE_AA);
+    cv::line(rgb, pt-Point2d(-outer_rad,outer_rad), pt-Point2d(-inner_rad,inner_rad), colour, 1, cv::LINE_AA);
+    cv::line(rgb, pt+Point2d(-inner_rad,inner_rad), pt+Point2d(-outer_rad,outer_rad), colour, 1, cv::LINE_AA);
 }
 
 ///
 /// Draw transformed axes.
 ///
 void drawAxes(Mat& rgb, const CameraModelPtr cam_model, const Mat& R, const Mat& t, const cv::Scalar colour)
-{
+{
+    if (R.empty() || t.empty()) { return; }
+
     /// Transformed axes.
     Mat sx = R * (cv::Mat_<double>(3,1) << 1,0,0) + t;
     Mat sy = R * (cv::Mat_<double>(3,1) << 0,1,0) + t;
     Mat sz = R * (cv::Mat_<double>(3,1) << 0,0,1) + t;
 
-    /// Draw transformed axes.
+    /// Find axes origin.
     double vec[3];
     Point2d pt, pt0;
     vec[0] = t.at<double>(0,0);
@@ -159,15 +161,15 @@ void drawAxes(Mat& rgb, const CameraModelPtr cam_model, const Mat& R, const Mat&
         vec[2] = sx.at<double>(2, 0);
         cam_model->vectorToPixel(vec, pt.x, pt.y);
 
-        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, CV_AA, 2);
-        cv::putText(rgb, "x", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, CV_AA);
+        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, cv::LINE_AA, 2);
+        cv::putText(rgb, "x", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, cv::LINE_AA);
 
         // indicate in to or out of the page
         if (vec[2] < t.at<double>(2, 0)) {
-            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, CV_AA, 2);
+            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, cv::LINE_AA, 2);
         }
         else {
-            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, CV_AA, 2);
-            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, CV_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, cv::LINE_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, cv::LINE_AA, 2);
         }
     }
 
@@ -178,15 +180,15 @@ void drawAxes(Mat& rgb, const CameraModelPtr cam_model, const Mat& R, const Mat&
         vec[2] = sy.at<double>(2, 0);
         cam_model->vectorToPixel(vec, pt.x, pt.y);
 
-        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, CV_AA, 2);
-        cv::putText(rgb, "y", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, CV_AA);
+        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, cv::LINE_AA, 2);
+        cv::putText(rgb, "y", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, cv::LINE_AA);
 
         // indicate in to or out of the page
         if (vec[2] < t.at<double>(2, 0)) {
-            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, CV_AA, 2);
+            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, cv::LINE_AA, 2);
         }
         else {
-            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, CV_AA, 2);
-            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, CV_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, cv::LINE_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, cv::LINE_AA, 2);
         }
     }
 
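The repeated circle-vs-cross branch is the axis-direction marker: if the projected tip is closer to the camera than the origin (`vec[2] < t.at<double>(2, 0)`, assuming camera-frame z points away from the camera), the axis points out of the page and gets a circle, otherwise a cross. Factored out for clarity, as a sketch only; `drawAxes()` keeps the inline form:

```cpp
#include <opencv2/imgproc.hpp>

// Circle = axis tip towards the camera (out of the page); cross = away (into the page).
void drawInOutMarker(cv::Mat& rgb, const cv::Point2d& tip, bool towardsCamera, const cv::Scalar& colour)
{
    if (towardsCamera) {
        cv::circle(rgb, 4 * tip, 4 * 4, colour, 2, cv::LINE_AA, 2);
    } else {
        cv::line(rgb, 4 * (tip + cv::Point2d(-4, -4)), 4 * (tip + cv::Point2d(4, 4)), colour, 2, cv::LINE_AA, 2);
        cv::line(rgb, 4 * (tip + cv::Point2d(-4, 4)), 4 * (tip + cv::Point2d(4, -4)), colour, 2, cv::LINE_AA, 2);
    }
}
```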
@@ -197,19 +199,59 @@ void drawAxes(Mat& rgb, const CameraModelPtr cam_model, const Mat& R, const Mat&
         vec[2] = sz.at<double>(2, 0);
         cam_model->vectorToPixel(vec, pt.x, pt.y);
 
-        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, CV_AA, 2);
-        cv::putText(rgb, "z", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, CV_AA);
+        cv::line(rgb, 4 * pt0, 4 * pt, colour, 2, cv::LINE_AA, 2);
+        cv::putText(rgb, "z", pt + Point2d(10, 0), cv::FONT_HERSHEY_SIMPLEX, 1.0, colour, 1, cv::LINE_AA);
 
         // indicate in to or out of the page
         if (vec[2] < t.at<double>(2, 0)) {
-            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, CV_AA, 2);
+            cv::circle(rgb, 4 * pt, 4 * 4, colour, 2, cv::LINE_AA, 2);
         }
         else {
-            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, CV_AA, 2);
-            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, CV_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, -4)), 4 * (pt + Point2d(4, 4)), colour, 2, cv::LINE_AA, 2);
+            cv::line(rgb, 4 * (pt + Point2d(-4, 4)), 4 * (pt + Point2d(4, -4)), colour, 2, cv::LINE_AA, 2);
         }
     }
 }
 
+///
+/// Draw animal axis.
+///
+void drawAnimalAxis(Mat& rgb, const CameraModelPtr cam_model, const Mat& R, const Mat& t, const double r, const cv::Scalar colour)
+{
+    if (R.empty() || t.empty()) { return; }
+
+    /// Transformed axes.
+    Mat sx = R * 0.5 * (cv::Mat_<double>(3, 1) << 1, 0, 0);
+    Mat sx1 = R * 0.45 * (cv::Mat_<double>(3, 1) << 1, 0, 0);
+    Mat sx2 = R * 0.45 * (cv::Mat_<double>(3, 1) << 1, 0, 0);
+    Mat sz = R * -1.0 * (cv::Mat_<double>(3, 1) << 0, 0, 1) + t;
+
+    ///// Find axes origin.
+    //double vec[3];
+    //vec[0] = t.at<double>(0, 0);
+    //vec[1] = t.at<double>(1, 0);
+    //vec[2] = t.at<double>(2, 0);
+    //Point2d o;
+    //cam_model->vectorToPixel(vec, o.x, o.y);
+
+    // up
+    double vec[3];
+    vec[0] = sz.at<double>(0, 0);
+    vec[1] = sz.at<double>(1, 0);
+    vec[2] = sz.at<double>(2, 0);
+    Point2d up;
+    cam_model->vectorToPixel(vec, up.x, up.y);
+
+    // fwd
+    vec[0] = sx.at<double>(0, 0) + sz.at<double>(0, 0);
+    vec[1] = sx.at<double>(1, 0) + sz.at<double>(1, 0);
+    vec[2] = sx.at<double>(2, 0) + sz.at<double>(2, 0);
+    Point2d fwd;
+    cam_model->vectorToPixel(vec, fwd.x, fwd.y);
+
+    cv::line(rgb, 4 * up, 4 * fwd, colour, 2, cv::LINE_AA, 2);
+    cv::circle(rgb, 4 * up, 4 * 2, colour, 2, cv::LINE_AA, 2);
+}
+
 ///
 /// Draw rectangle corners.
 ///
@@ -235,6 +277,6 @@ void drawRectCorners(Mat& rgb, const CameraModelPtr cam_model, Mat& cnrs, const
 ///
 void shadowText(Mat& img, std::string text, int px, int py, int r, int g, int b)
 {
-    cv::putText(img, text, cv::Point(px + 1, py + 1), CV_FONT_HERSHEY_SIMPLEX, 0.5, CV_RGB(0, 0, 0));
-    cv::putText(img, text, cv::Point(px, py), CV_FONT_HERSHEY_SIMPLEX, 0.5, CV_RGB(r, g, b));
+    cv::putText(img, text, cv::Point(px + 1, py + 1), cv::FONT_HERSHEY_SIMPLEX, 0.5, CV_RGB(0, 0, 0));
+    cv::putText(img, text, cv::Point(px, py), cv::FONT_HERSHEY_SIMPLEX, 0.5, CV_RGB(r, g, b));
 }
diff --git a/src/geometry.cpp b/src/geometry.cpp
index bf7b8a1..08acb11 100644
--- a/src/geometry.cpp
+++ b/src/geometry.cpp
@@ -102,7 +102,8 @@ bool circleFit_camModel(const vector& pix2d, const CameraModelPtr cam_m
 }
 
 ///
-/// Compute camera-animal R+t transform from supplied square corners.
+/// Compute animal-camera R+t transform from supplied square corners.
+/// R is animal frame to camera frame transform.
 ///
 bool computeRtFromSquare(const CameraModelPtr cam_model, const Mat& ref_cnrs, const vector& cnrs, Mat& R, Mat& t)
 {
@@ -125,7 +126,7 @@ bool computeRtFromSquare(const CameraModelPtr cam_model, const Mat& ref_cnrs, co
 
     /// Minimise transform from reference corners.
     SquareRT square(cnr_vecs, ref_cnrs);
-    double guess[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 1.0};
+    double guess[6] = {0.0, 0.0, 0.0, 0.0, 0.0, 100.0};
     if (!R.empty() && !t.empty()) {
         // init guess
         if ((R.depth() != CV_64F) || (t.depth() != CV_64F)) {
@@ -153,7 +154,7 @@ bool computeRtFromSquare(const CameraModelPtr cam_model, const Mat& ref_cnrs, co
 }
 
 ///
-/// Wrapper for computing camera-animal R+t transform from XY square.
+/// Wrapper for computing animal-camera R+t transform from XY square.
 /// Square normal = animal Z axis. Corner ordering is TL (+X,-Y), TR (+X,+Y), BR (-X,+Y), BL (-X,-Y).
 ///
 bool computeRtFromSquare_XY(const CameraModelPtr cam_model, const vector& cnrs, Mat& R, Mat& t)
@@ -162,7 +163,7 @@ bool computeRtFromSquare_XY(const CameraModelPtr cam_model, const vector& cnrs, Mat& R, Mat& t)
@@ -171,10 +172,28 @@ bool computeRtFromSquare_YZ(const CameraModelPtr cam_model, const vector& cnrs, Mat& R, Mat& t)
 {
     return computeRtFromSquare(cam_model, XZ_CNRS, cnrs, R, t);
 }
+
+///
+/// Wrapper for computing animal-camera R+t transform.
+///
+bool computeRtFromSquare(const CameraModelPtr cam_model, const string ref_str, const vector& cnrs, Mat& R, Mat& t)
+{
+    bool ret = false;
+    if (ref_str == "xy") {
+        ret = computeRtFromSquare_XY(cam_model, cnrs, R, t);
+    }
+    else if (ref_str == "yz") {
+        ret = computeRtFromSquare_YZ(cam_model, cnrs, R, t);
+    }
+    else if (ref_str == "xz") {
+        ret = computeRtFromSquare_XZ(cam_model, cnrs, R, t);
+    }
+    return ret;
+}
\ No newline at end of file
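The new overload is a thin dispatcher: a string such as `"xy"`, `"yz"` or `"xz"` (typically a configuration value) selects which axis-aligned reference square to fit against. A hypothetical call site; the wrapper name, config source and corner container type are assumptions, since the element type is not shown in this hunk:

```cpp
#include <string>
#include <opencv2/core.hpp>
#include "geometry.h"   // assumed header declaring computeRtFromSquare() and CameraModelPtr

// Illustrative only: choose the reference-square plane from a config string and call
// the new dispatch overload added above.
template <typename CornerVec>
bool estimateAnimalTransform(const CameraModelPtr cam_model, const std::string& ref_plane,
                             const CornerVec& cnrs, cv::Mat& R, cv::Mat& t)
{
    return computeRtFromSquare(cam_model, ref_plane, cnrs, R, t);   // ref_plane: "xy", "yz" or "xz"
}
```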
diff --git a/src/zhelpers.hpp b/src/zhelpers.hpp
index 93949f9..0d04581 100644
--- a/src/zhelpers.hpp
+++ b/src/zhelpers.hpp
@@ -24,6 +24,7 @@
 
 // Bring Windows MSVC up to C99 scratch
 #if (defined (WIN32))
+#   include 
 typedef unsigned long ulong;
 typedef unsigned int uint;
 typedef __int64 int64_t;