diff --git a/CMakeLists.txt b/CMakeLists.txt
index 7be8d0f..e2b0950 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -53,7 +53,7 @@ add_message_files(
State.msg
StateGroundTruth.msg
UkfStats.msg
-
+ OpticalFlow.msg
)
## Generate services in the 'srv' folder
diff --git a/msg/OpticalFlow.msg b/msg/OpticalFlow.msg
new file mode 100644
index 0000000..b1b542f
--- /dev/null
+++ b/msg/OpticalFlow.msg
@@ -0,0 +1,10 @@
+# Message used by the optical flow node to republish the flow vectors for
+# display in the web dashboard.
+
+Header header
+
+uint32 rows # number of rows in the flow field (a fraction of the corresponding image's height)
+uint32 columns # number of columns in the flow field (a fraction of the corresponding image's width)
+
+float64[] flow_x # x components of the optical flow field stored in a 1D array; the index of the vector at (x, y) is (y * columns + x)
+float64[] flow_y # the corresponding y components of the vectors
\ No newline at end of file
diff --git a/scripts/analyze_flow.py b/scripts/analyze_flow.py
index 46df44f..b95b09e 100644
--- a/scripts/analyze_flow.py
+++ b/scripts/analyze_flow.py
@@ -4,6 +4,7 @@
import picamera.array
from pidrone_pkg.msg import State
from geometry_msgs.msg import TwistStamped
+from pidrone_pkg.msg import OpticalFlow
class AnalyzeFlow(picamera.array.PiMotionAnalysis):
@@ -28,6 +29,7 @@ def setup(self, camera_wh):
############
# Publisher:
self.twistpub = rospy.Publisher('/pidrone/picamera/twist', TwistStamped, queue_size=1)
+ self.flowpub = rospy.Publisher('/pidrone/picamera/flow', OpticalFlow, queue_size=1)
# Subscriber:
rospy.Subscriber("/pidrone/state", State, self.state_callback)
@@ -50,8 +52,18 @@ def analyse(self, a):
twist_msg.twist.linear.x = self.near_zero(x_motion)
twist_msg.twist.linear.y = - self.near_zero(y_motion)
+ # create the optical flow message
+ shape = np.shape(x)
+ flow_msg = OpticalFlow()
+ flow_msg.header.stamp = rospy.Time.now()
+ flow_msg.rows = shape[0]
+ flow_msg.columns = shape[1]
+ flow_msg.flow_x = x.flatten()
+ flow_msg.flow_y = y.flatten()
+
# Update and publish the twist message
self.twistpub.publish(twist_msg)
+ self.flowpub.publish(flow_msg)
def near_zero(self, n):
return 0 if abs(n) < 0.001 else n
diff --git a/web/index.html b/web/index.html
index 0ad5380..3e1ef3b 100755
--- a/web/index.html
+++ b/web/index.html
@@ -80,7 +80,12 @@
Camera Image
-
+
+
+
+
+
+
Make sure you run rosrun web_video_server web_video_server
and the image stream.
diff --git a/web/js/main.js b/web/js/main.js
index 5dbd76d..541210f 100644
--- a/web/js/main.js
+++ b/web/js/main.js
@@ -39,6 +39,7 @@ var startTime;
var heightChartPaused = false;
var showingUkfAnalysis = false;
var spanningFullWindow = false;
+var displayOpticalFlow = false;
function closeSession(){
console.log("Closing connections.");
@@ -235,6 +236,14 @@ function init() {
throttle_rate : 80
});
+ cameraFlowSub = new ROSLIB.Topic({
+ ros : ros,
+ name : '/pidrone/picamera/flow',
+ messageType : 'pidrone_pkg/OpticalFlow',
+ queue_length : 2,
+ throttle_rate : 100
+ });
+
emaIrSub = new ROSLIB.Topic({
ros : ros,
name : '/pidrone/state/ema',
@@ -447,6 +456,8 @@ function init() {
updateCameraPoseXYChart(message);
});
+ cameraFlowSub.subscribe(updateVisionFlow);
+
function updateGroundTruthXYChart(msg) {
xPos = msg.pose.position.x;
yPos = msg.pose.position.y;
@@ -505,6 +516,57 @@ function init() {
}
}
+ function updateVisionFlow(msg) {
+ if(displayOpticalFlow) {
+ var rows = msg.rows;
+ var cols = msg.columns; // TODO: investigate — the flow field seems to contain one extra column; confirm against the camera's motion-block dimensions
+
+ // Get the [x, y] components of a flow vector at x, y with some scaling, flipping, constraining, etc...
+ function getVector(x, y) {
+ return [-constrain(2*msg.flow_x[(y * cols) + x], -12, 12), -constrain(2*msg.flow_y[(y * cols) + x], -12, 12)]
+ }
+
+ var ctx = document.getElementById('cameraFlowCanvas').getContext('2d');
+
+ ctx.clearRect(0, 0, 320, 240);
+
+ ctx.strokeStyle = "#ccff00"; // Draw direction indicator
+ ctx.beginPath();
+ for(var ix = 0; ix < cols; ix++) {
+ for(var iy = 0; iy < rows; iy++) {
+ var x = (ix * 16) + 8;
+ var y = (iy * 16) + 8;
+ var vec = getVector(ix, iy);
+ ctx.moveTo(x, y);
+ ctx.lineTo(x + vec[0], y + vec[1]);
+ ctx.moveTo(x-1, y);
+ ctx.lineTo(x+1, y);
+ ctx.moveTo(x, y-1);
+ ctx.lineTo(x, y+1);
+ }
+ }
+ ctx.stroke();
+
+ ctx.strokeStyle = "#000"; // Draw center marker
+ ctx.beginPath();
+ for(var ix = 0; ix < cols; ix++) {
+ for(var iy = 0; iy < rows; iy++) {
+ var x = (ix * 16) + 8;
+ var y = (iy * 16) + 8;
+ ctx.moveTo(x-1, y);
+ ctx.lineTo(x+1, y);
+ ctx.moveTo(x, y-1);
+ ctx.lineTo(x, y+1);
+ }
+ }
+ ctx.stroke();
+ }
+ }
+
+ function constrain(num, min, max) {
+ return num > max ? max : (num < min ? min : num);
+ }
+
function updateCameraPoseXYChart(msg) {
xPos = msg.pose.position.x;
yPos = msg.pose.position.y;
@@ -1328,6 +1390,16 @@ function togglePauseXYChart(btn) {
console.log('Pause button pressed')
}
+function toggleOpticalFlow(btn) {
+ displayOpticalFlow = !displayOpticalFlow;
+ if (displayOpticalFlow) {
+ btn.value = 'Hide Optical Flow'
+ } else {
+ btn.value = 'Show Optical Flow'
+ document.getElementById('cameraFlowCanvas').getContext('2d').clearRect(0, 0, 320, 240);
+ }
+}
+
function publishVelocityMode() {
positionMsg.data = false;
positionPub.publish(positionMsg)