diff --git a/AreaLearningJava/app/build.gradle b/AreaLearningJava/app/build.gradle
index 160ea9db..ec23419f 100644
--- a/AreaLearningJava/app/build.gradle
+++ b/AreaLearningJava/app/build.gradle
@@ -36,6 +36,7 @@ if (project.hasProperty("Tango.catkin_devel_prefix")) {
}
dependencies {
+ compile fileTree(dir: external_lib_prefix + '/jar', include: ['**/*.jar'])
compile(name: 'TangoUtils', ext: 'aar')
compile 'org.rajawali3d:rajawali:1.0.294-SNAPSHOT@aar'
}
diff --git a/AugmentedRealitySample/app/build.gradle b/AugmentedRealitySample/app/build.gradle
index 857e16cb..b293a993 100644
--- a/AugmentedRealitySample/app/build.gradle
+++ b/AugmentedRealitySample/app/build.gradle
@@ -37,7 +37,8 @@ if (project.hasProperty("Tango.catkin_devel_prefix")) {
}
dependencies {
- compile(name: 'TangoUtils', ext: 'aar')
+ compile (name: 'TangoUtils', ext: 'aar')
+ compile (name: 'tango_support_java_lib', ext: 'aar')
compile 'org.rajawali3d:rajawali:1.0.294-SNAPSHOT@aar'
}
diff --git a/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityActivity.java b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityActivity.java
index 4af5f72f..38cbacf2 100644
--- a/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityActivity.java
+++ b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityActivity.java
@@ -16,11 +16,13 @@
package com.projecttango.experiments.augmentedrealitysample;
import java.util.ArrayList;
-import java.util.concurrent.atomic.AtomicBoolean;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
+import android.util.Log;
+import android.view.MotionEvent;
+import android.view.View;
import android.widget.Toast;
import com.google.atap.tangoservice.Tango;
@@ -32,11 +34,15 @@
import com.google.atap.tangoservice.TangoPoseData;
import com.google.atap.tangoservice.TangoXyzIjData;
import com.projecttango.rajawali.ar.TangoRajawaliView;
+import com.projecttango.tangosupport.TangoSupport;
/**
* An example showing how to build a very simple augmented reality application in Java.
 * It uses Rajawali to do the rendering through the utility classes TangoRajawaliRenderer
 * and TangoRajawaliView from TangoUtils.
+ * It also uses the TangoSupportLibrary to do plane fitting using the PointCloud data. Whenever the
+ * user clicks on the camera display, plane detection will be done on the surface closest to the
+ * click location and a 3D object will be placed in the scene anchored in that location.
*
 * TangoRajawaliView is used in the same way as the TangoCameraPreview: we first need to initialize the
* TangoRajawaliView class with the activity's context and connect to the camera we want by using
@@ -49,12 +55,15 @@
* The implementation of the 3D world is done by subclassing the Renderer, just like any other
* Rajawali application.
*
- * Note that it is important to include the KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION configuration parameter in
- * order to achieve best results synchronizing the Rajawali virtual world with the RGB camera.
+ * Note that it is important to include the KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION configuration
+ * parameter in order to achieve best results synchronizing the Rajawali virtual world with
+ * the RGB camera.
*/
-public class AugmentedRealityActivity extends Activity {
+public class AugmentedRealityActivity extends Activity implements View.OnTouchListener {
+ private static final String TAG = "AugmentedRealityActiv";
private TangoRajawaliView mGLView;
private AugmentedRealityRenderer mRenderer;
+ private PointCloudManager mPointCloudManager;
private Tango mTango;
private boolean mIsConnected;
private boolean mIsPermissionGranted;
@@ -65,6 +74,7 @@ protected void onCreate(Bundle savedInstanceState) {
mGLView = new TangoRajawaliView(this);
mRenderer = new AugmentedRealityRenderer(this);
mGLView.setSurfaceRenderer(mRenderer);
+ mGLView.setOnTouchListener(this);
mTango = new Tango(this);
startActivityForResult(
Tango.getRequestPermissionIntent(Tango.PERMISSIONTYPE_MOTION_TRACKING),
@@ -91,17 +101,17 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Augmented reality view and renderer
private void startAugmentedreality() {
if (!mIsConnected) {
+ mIsConnected = true;
// Connect to color camera
- mGLView.connectToTangoCamera(mTango,
- TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
+ mGLView.connectToTangoCamera(mTango, TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
// Use default configuration for Tango Service, plus low latency IMU integration.
TangoConfig config = mTango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
// NOTE: low latency integration is necessary to achieve a precise alignment of
// virtual objects with the RBG image and produce a good AR effect.
config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);
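+ // Enable depth sensing so the onXyzIjAvailable callback delivers point clouds for plane fitting.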
+ config.putBoolean(TangoConfig.KEY_BOOLEAN_DEPTH, true);
mTango.connect(config);
- mIsConnected = true;
// No need to add any coordinate frame pairs since we are not using
// pose data. So just initialize.
@@ -123,7 +133,14 @@ public void onFrameAvailable(int cameraId) {
@Override
public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
- // We are not using OnPoseAvailable for this app
+ // Get the device pose at the time the point cloud was acquired
+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(
+ TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
+ TangoPoseData.COORDINATE_FRAME_DEVICE);
+ TangoPoseData cloudPose = mTango.getPoseAtTime(xyzIj.timestamp, framePair);
+
+ // Save the cloud and point data for later use
+ mPointCloudManager.updateXyzIjData(xyzIj, cloudPose);
}
@Override
@@ -131,9 +148,40 @@ public void onTangoEvent(TangoEvent event) {
// We are not using OnPoseAvailable for this app
}
});
+
+ // Get extrinsics from device for use in transforms
+ // This needs to be done after connecting Tango and listeners
+ setupExtrinsics();
+
+ // Set-up point cloud plane fitting library helper class
+ mPointCloudManager = new PointCloudManager(mTango.getCameraIntrinsics(
+ TangoCameraIntrinsics.TANGO_CAMERA_COLOR));
+
}
}
+ /**
+ * Calculates and stores the fixed transformations between the device and the various sensors
+ * to be used later for transformations between frames.
+ */
+ private void setupExtrinsics() {
+ // Create Camera to IMU Transform
+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
+ framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
+ framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
+ TangoPoseData imuTrgbPose = mTango.getPoseAtTime(0.0, framePair);
+
+ // Create Device to IMU Transform
+ framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
+ TangoPoseData imuTdevicePose = mTango.getPoseAtTime(0.0, framePair);
+
+ // Create Depth camera to IMU Transform
+ framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH;
+ TangoPoseData imuTdepthPose = mTango.getPoseAtTime(0.0, framePair);
+
+ mRenderer.setupExtrinsics(imuTdevicePose, imuTrgbPose, imuTdepthPose);
+ }
+
@Override
protected void onPause() {
@@ -152,4 +200,38 @@ protected void onResume() {
startAugmentedreality();
}
}
+
+ @Override
+ public boolean onTouch(View view, MotionEvent motionEvent) {
+ if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
+ // Calculate click location in u,v (0;1) coordinates
+ float u = motionEvent.getX() / view.getWidth();
+ float v = motionEvent.getY() / view.getHeight();
+
+ try {
+ doFitPlane(u, v);
+ } catch (Throwable t) {
+ Log.e(TAG, "Exception measuring nomral", t);
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Use the TangoSupport library with point cloud data to calculate the plane of the surface
+ * visible at the clicked location in the camera view, and update the renderer to show a
+ * 3D object anchored at that location.
+ */
+ private void doFitPlane(float u, float v) {
+ // Get the current device pose
+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(
+ TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
+ TangoPoseData.COORDINATE_FRAME_DEVICE);
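+ // A timestamp of 0.0 asks the Tango service for the most recent available pose.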
+ TangoPoseData devicePose = mTango.getPoseAtTime(0.0, framePair);
+
+ // Perform plane fitting with the latest available point cloud data
+ TangoSupport.IntersectionPointPlaneModelPair planeModel =
+ mPointCloudManager.fitPlane(u, v, devicePose, mRenderer.getPoseCalculator());
+ mRenderer.updateObjectPose(planeModel.intersectionPoint, planeModel.planeModel, devicePose);
+ }
}
diff --git a/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityRenderer.java b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityRenderer.java
index 3d9fccb7..e92371d6 100644
--- a/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityRenderer.java
+++ b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/AugmentedRealityRenderer.java
@@ -18,14 +18,19 @@
import android.content.Context;
import android.view.MotionEvent;
+import com.google.atap.tangoservice.TangoPoseData;
+import com.projecttango.rajawali.Pose;
+import com.projecttango.rajawali.ScenePoseCalcuator;
import com.projecttango.rajawali.ar.TangoRajawaliRenderer;
import org.rajawali3d.Object3D;
import org.rajawali3d.lights.DirectionalLight;
import org.rajawali3d.materials.Material;
import org.rajawali3d.materials.methods.DiffuseMethod;
-import org.rajawali3d.primitives.NPrism;
-import org.rajawali3d.primitives.Sphere;
+import org.rajawali3d.materials.textures.ATexture;
+import org.rajawali3d.materials.textures.Texture;
+import org.rajawali3d.math.vector.Vector3;
+import org.rajawali3d.primitives.Cube;
/**
* Very simple example augmented reality renderer which displays two objects in a fixed position
@@ -39,6 +44,13 @@
*/
public class AugmentedRealityRenderer extends TangoRajawaliRenderer {
+ private static final float CUBE_SIDE_LENGTH = 0.5f;
+
+ private Pose mPlanePose;
+ private boolean mPlanePoseUpdated = false;
+
+ private Object3D mObject;
+
public AugmentedRealityRenderer(Context context) {
super(context);
}
@@ -55,23 +67,61 @@ protected void initScene() {
light.setPosition(3, 2, 4);
getCurrentScene().addLight(light);
- // Set-up a material: green with application of the light
+ // Set up a material: green, lit, and carrying the instructions texture
Material material = new Material();
material.setColor(0xff009900);
+ try {
+ Texture t = new Texture("instructions", R.drawable.instructions);
+ material.addTexture(t);
+ } catch (ATexture.TextureException e) {
+ e.printStackTrace();
+ }
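+ // Keep the green base color to a small (10%) influence so the texture shows through.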
+ material.setColorInfluence(0.1f);
material.enableLighting(true);
material.setDiffuseMethod(new DiffuseMethod.Lambert());
- // Build a pyramid and place it roughly in front and a bit to the right
- Object3D object1 = new NPrism(4, 0f, 0.2f, 0.2f);
- object1.setMaterial(material);
- object1.setPosition(-0.25, 0, -1);
- getCurrentScene().addChild(object1);
-
- // Build a sphere and place it roughly in front and a bit to the left
- object1 = new Sphere(0.1f, 24, 24);
- object1.setMaterial(material);
- object1.setPosition(0.25, 0, -1);
- getCurrentScene().addChild(object1);
+ // Build a Cube and place it initially in the origin
+ mObject = new Cube(CUBE_SIDE_LENGTH);
+ mObject.setMaterial(material);
+ mObject.setPosition(0, 0, -3);
+ mObject.setRotation(Vector3.Axis.Z, 180);
+ getCurrentScene().addChild(mObject);
+ }
+
+ @Override
+ protected void onRender(long ellapsedRealtime, double deltaTime) {
+ super.onRender(ellapsedRealtime, deltaTime);
+
+ synchronized (this) {
+ if (mPlanePoseUpdated) {
+ mPlanePoseUpdated = false;
+ // Place the 3D object in the location of the detected plane
+ mObject.setPosition(mPlanePose.getPosition());
+ mObject.setOrientation(mPlanePose.getOrientation());
+ // Move it forward by half of the size of the cube to make it flush with the plane
+ // surface
+ mObject.moveForward(CUBE_SIDE_LENGTH / 2.0f);
+ }
+ }
+ }
+
+ /**
+ * Update the 3D object based on the provided measurement point, normal (in depth frame) and
+ * device pose at the time of measurement.
+ */
+ public synchronized void updateObjectPose(double[] point, double[] normal,
+ TangoPoseData devicePose) {
+ mPlanePose = mScenePoseCalcuator.planeFitToOpenGLPose(point, normal, devicePose);
+ mPlanePoseUpdated = true;
+ }
+
+ /**
+ * Provide access to scene calculator helper class to perform necessary transformations.
+ * NOTE: This won't be necessary once transformation functions are available through the
+ * support library
+ */
+ public ScenePoseCalcuator getPoseCalculator() {
+ return mScenePoseCalcuator;
}
@Override
diff --git a/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/PointCloudManager.java b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/PointCloudManager.java
new file mode 100644
index 00000000..69048128
--- /dev/null
+++ b/AugmentedRealitySample/app/src/main/java/com/projecttango/experiments/augmentedrealitysample/PointCloudManager.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2014 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.projecttango.experiments.augmentedrealitysample;
+
+import com.google.atap.tangoservice.TangoCameraIntrinsics;
+import com.google.atap.tangoservice.TangoPoseData;
+import com.google.atap.tangoservice.TangoXyzIjData;
+import com.projecttango.rajawali.ScenePoseCalcuator;
+import com.projecttango.tangosupport.TangoSupport;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * This helper class keeps a copy of the point cloud data received in callbacks for use with the
+ * plane fitting function.
+ * It is implemented to be thread safe so that the caller (the Activity) doesn't need to worry
+ * about locking between the Tango callback and UI threads.
+ */
+public class PointCloudManager {
+ private static final String TAG = "PointCloudManager";
+
+ private final TangoCameraIntrinsics mTangoCameraIntrinsics;
+ private final TangoXyzIjData mXyzIjData;
+ private TangoPoseData mDevicePoseAtCloudTime;
+
+ public PointCloudManager(TangoCameraIntrinsics intrinsics) {
+ mXyzIjData = new TangoXyzIjData();
+ mTangoCameraIntrinsics = intrinsics;
+ }
+
+ /**
+ * Update the current cloud data with the provided xyzIjData from a Tango callback.
+ *
+ * @param from The point cloud data
+ * @param xyzIjPose The device pose with respect to start of service at the time
+ * the point cloud was acquired
+ */
+ public synchronized void updateXyzIjData(TangoXyzIjData from, TangoPoseData xyzIjPose) {
+ mDevicePoseAtCloudTime = xyzIjPose;
+
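+ // Reuse the existing direct buffer when it is large enough; otherwise allocate a new one (3 floats of 4 bytes per point).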
+ if (mXyzIjData.xyz == null || mXyzIjData.xyz.capacity() < from.xyzCount * 3) {
+ mXyzIjData.xyz = ByteBuffer.allocateDirect(from.xyzCount * 3 * 4)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ } else {
+ mXyzIjData.xyz.rewind();
+ }
+
+ mXyzIjData.xyzCount = from.xyzCount;
+ mXyzIjData.timestamp = from.timestamp;
+
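+ // Copy the point data and rewind both buffers so later readers start from the beginning.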
+ from.xyz.rewind();
+ mXyzIjData.xyz.put(from.xyz);
+ mXyzIjData.xyz.rewind();
+ from.xyz.rewind();
+ }
+
+ /**
+ * Calculate the plane that best fits the current point cloud at the provided u,v coordinates
+ * in the 2D projection of the point cloud data (i.e.: point cloud image).
+ *
+ * @param u u (horizontal) component of the click location
+ * @param v v (vertical) component of the click location
+ * @param devicePoseAtClickTime Device pose at the time this operation is requested
+ * @param poseCalcuator ScenePoseCalculator helper instance to calculate transforms
+ * @return The point and plane model, in depth sensor frame
+ */
+ public synchronized TangoSupport.IntersectionPointPlaneModelPair fitPlane(float u, float v,
+ TangoPoseData devicePoseAtClickTime, ScenePoseCalcuator poseCalcuator) {
+
+ // We need to calculate the transform between the color camera at the time the user clicked
+ // and the depth camera at the time the depth cloud was acquired.
+ // This operation is currently implemented in the provided ScenePoseCalculator helper
+ // class. In the future, the support library will provide a method for this calculation.
+ TangoPoseData colorCameraTDepthCameraWithTime
+ = poseCalcuator.calculateColorCameraTDepthWithTime(devicePoseAtClickTime, mDevicePoseAtCloudTime);
+
+ return TangoSupport.fitPlaneModelNearClick(mXyzIjData, mTangoCameraIntrinsics,
+ colorCameraTDepthCameraWithTime, u, v);
+ }
+}
diff --git a/AugmentedRealitySample/app/src/main/res/drawable-xxhdpi/instructions.png b/AugmentedRealitySample/app/src/main/res/drawable-xxhdpi/instructions.png
new file mode 100644
index 00000000..c3b8ea21
Binary files /dev/null and b/AugmentedRealitySample/app/src/main/res/drawable-xxhdpi/instructions.png differ
diff --git a/MotionTrackingJava/app/build.gradle b/MotionTrackingJava/app/build.gradle
index fb458b5c..3202a884 100644
--- a/MotionTrackingJava/app/build.gradle
+++ b/MotionTrackingJava/app/build.gradle
@@ -37,6 +37,7 @@ if (project.hasProperty("Tango.catkin_devel_prefix")) {
}
dependencies {
+ compile fileTree(dir: external_lib_prefix + '/jar', include: ['**/*.jar'])
compile (name: 'TangoUtils', ext: 'aar')
compile 'org.rajawali3d:rajawali:1.0.294-SNAPSHOT@aar'
}
diff --git a/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudActivity.java b/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudActivity.java
index b586934c..bcb6a7f2 100644
--- a/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudActivity.java
+++ b/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudActivity.java
@@ -337,21 +337,19 @@ public void run() {
}).start();
}
- private void setupExtrinsics(){
+ private void setupExtrinsics() {
TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
TangoPoseData imuTColorCameraPose = mTango.getPoseAtTime(0.0,framePair);
- framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH;
TangoPoseData imuTDepthCameraPose = mTango.getPoseAtTime(0.0,framePair);
- framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
TangoPoseData imuTDevicePose = mTango.getPoseAtTime(0.0,framePair);
- mRenderer.setupExtrinsics(imuTColorCameraPose, imuTDepthCameraPose, imuTDevicePose);
+ mRenderer.setupExtrinsics(imuTDevicePose, imuTColorCameraPose, imuTDepthCameraPose);
}
/*
diff --git a/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudRajawaliRenderer.java b/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudRajawaliRenderer.java
index a6181eb8..924254e0 100644
--- a/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudRajawaliRenderer.java
+++ b/PointCloudJava/app/src/main/java/com/projecttango/experiments/javapointcloud/PointCloudRajawaliRenderer.java
@@ -132,15 +132,14 @@ public void setThirdPersonView() {
/**
* Sets the extrinsics between different sensors of a Tango Device
+ *
+ * @param imuTDevicePose : Pose transformation between Device and IMU sensor.
* @param imuTColorCameraPose : Pose transformation between Color camera and IMU sensor.
* @param imuTDepthCameraPose : Pose transformation between Depth camera and IMU sensor.
- * @param imuTDevicePose : Pose transformation between Device and IMU sensor.
*/
- public void setupExtrinsics(TangoPoseData imuTColorCameraPose, TangoPoseData imuTDepthCameraPose, TangoPoseData imuTDevicePose) {
- Matrix4 imuTColorCamera = ScenePoseCalcuator.tangoPoseToMatrix(imuTColorCameraPose);
- Matrix4 imuTDevice = ScenePoseCalcuator.tangoPoseToMatrix(imuTDevicePose);
- Matrix4 imuTDepthCamera = ScenePoseCalcuator.tangoPoseToMatrix(imuTDepthCameraPose);
- mScenePoseCalcuator.setupExtrinsics(imuTColorCamera, imuTDepthCamera, imuTDevice);
+ public void setupExtrinsics(TangoPoseData imuTDevicePose, TangoPoseData imuTColorCameraPose,
+ TangoPoseData imuTDepthCameraPose) {
+ mScenePoseCalcuator.setupExtrinsics(imuTDevicePose, imuTColorCameraPose, imuTDepthCameraPose);
}
}
diff --git a/TangoReleaseLibs/aar/TangoUtils.aar b/TangoReleaseLibs/aar/TangoUtils.aar
index 98ae8646..14fdece2 100644
Binary files a/TangoReleaseLibs/aar/TangoUtils.aar and b/TangoReleaseLibs/aar/TangoUtils.aar differ
diff --git a/TangoReleaseLibs/aar/tango-ux-support-library.aar b/TangoReleaseLibs/aar/tango-ux-support-library.aar
index b026dcf6..9d7a599d 100644
Binary files a/TangoReleaseLibs/aar/tango-ux-support-library.aar and b/TangoReleaseLibs/aar/tango-ux-support-library.aar differ
diff --git a/TangoReleaseLibs/aar/tango_support_java_lib.aar b/TangoReleaseLibs/aar/tango_support_java_lib.aar
new file mode 100644
index 00000000..b2c78b9b
Binary files /dev/null and b/TangoReleaseLibs/aar/tango_support_java_lib.aar differ
diff --git a/TangoReleaseLibs/jar/tango_java_lib.jar b/TangoReleaseLibs/jar/tango_java_lib.jar
index 9fcff687..345bbaf8 100644
Binary files a/TangoReleaseLibs/jar/tango_java_lib.jar and b/TangoReleaseLibs/jar/tango_java_lib.jar differ
diff --git a/TangoUtils/app/src/main/java/com/projecttango/rajawali/Pose.java b/TangoUtils/app/src/main/java/com/projecttango/rajawali/Pose.java
index 04f51137..21f18a3a 100644
--- a/TangoUtils/app/src/main/java/com/projecttango/rajawali/Pose.java
+++ b/TangoUtils/app/src/main/java/com/projecttango/rajawali/Pose.java
@@ -37,4 +37,8 @@ public Quaternion getOrientation() {
public Vector3 getPosition() {
return mPosition;
}
+
+ public String toString() {
+ return "p:" + mPosition + ",q:" + mOrientation;
+ }
}
diff --git a/TangoUtils/app/src/main/java/com/projecttango/rajawali/ScenePoseCalcuator.java b/TangoUtils/app/src/main/java/com/projecttango/rajawali/ScenePoseCalcuator.java
index 3559be51..c4f28c2e 100644
--- a/TangoUtils/app/src/main/java/com/projecttango/rajawali/ScenePoseCalcuator.java
+++ b/TangoUtils/app/src/main/java/com/projecttango/rajawali/ScenePoseCalcuator.java
@@ -15,6 +15,8 @@
*/
package com.projecttango.rajawali;
+import android.util.Log;
+
import com.google.atap.tangoservice.Tango;
import com.google.atap.tangoservice.TangoCoordinateFramePair;
import com.google.atap.tangoservice.TangoPoseData;
@@ -58,11 +60,15 @@ public class ScenePoseCalcuator {
0, 0, 0, 1
});
+ // Up vector in the Tango Start of Service and Area Description frame
+ public static final Vector3 TANGO_WORLD_UP = new Vector3(0, 0, 1);
+
// Transformation from the position of the Depth camera to the Device frame
private Matrix4 mDeviceTDepthCamera;
// Transformation from the position of the Color Camera to the Device frame
private Matrix4 mDeviceTColorCamera;
+
/**
* Converts from TangoPoseData to a Matrix4 for transformations.
*/
@@ -72,21 +78,63 @@ public static Matrix4 tangoPoseToMatrix(TangoPoseData tangoPose) {
Quaternion q = new Quaternion(tangoPose.rotation[3], tangoPose.rotation[0],
tangoPose.rotation[1], tangoPose.rotation[2]);
// NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
- q.inverse();
+ q.conjugate();
Matrix4 m = new Matrix4();
m.setAll(v, new Vector3(1, 1, 1), q);
return m;
}
/**
- * Given a TangoPoseData object, calculate the corresponding position and orientation for a
- * OpenGL Scene Camera in the Rajawali world.
+ * Converts a transform in Matrix4 format to TangoPoseData.
+ */
+ public static TangoPoseData matrixToTangoPose(Matrix4 transform) {
+ // Get translation and rotation components from the transformation matrix
+ Vector3 p = transform.getTranslation();
+ Quaternion q = new Quaternion();
+ q.fromMatrix(transform);
+ // NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
+ q.conjugate();
+
+ TangoPoseData tangoPose = new TangoPoseData();
+ double[] t = tangoPose.translation = new double[3];
+ t[0] = p.x;
+ t[1] = p.y;
+ t[2] = p.z;
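+ // TangoPoseData stores the quaternion in (x, y, z, w) order.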
+ double[] r = tangoPose.rotation = new double[4];
+ r[0] = q.x;
+ r[1] = q.y;
+ r[2] = q.z;
+ r[3] = q.w;
+
+ return tangoPose;
+ }
+
+ /**
+ * Helper method to extract a Pose object from a transformation matrix taking into account
+ * Rajawali conventions.
+ */
+ public static Pose matrixToPose(Matrix4 m) {
+ // Get translation and rotation components from the transformation matrix
+ Vector3 p = m.getTranslation();
+ Quaternion q = new Quaternion();
+ q.fromMatrix(m);
+
+ // NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
+ q.conjugate();
+
+ return new Pose(p, q);
+ }
+
+ /**
+ * Given a pose in start of service or area description frame, calculate the corresponding
+ * position and orientation for an OpenGL Scene Camera in the Rajawali world.
*/
public Pose toOpenGLCameraPose(TangoPoseData tangoPose) {
// We can't do this calculation until Extrinsics are set-up
if (mDeviceTColorCamera == null) {
throw new RuntimeException("You must call setupExtrinsics first.");
}
+
Matrix4 startServiceTdevice = tangoPoseToMatrix(tangoPose);
// Get device pose in OpenGL world frame
@@ -96,16 +144,7 @@ public Pose toOpenGLCameraPose(TangoPoseData tangoPose) {
Matrix4 openglWorldTOpenglCamera = openglTDevice.multiply(mDeviceTColorCamera).
multiply(COLOR_CAMERA_T_OPENGL_CAMERA);
- // Get translation and rotation components from the resulting transformation matrix
- Vector3 p = openglWorldTOpenglCamera.getTranslation();
- Quaternion q = new Quaternion();
- q.fromMatrix(openglWorldTOpenglCamera);
-
- // NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
- q.inverse();
-
- // Notify of the new Camera information
- return new Pose(p, q);
+ return matrixToPose(openglWorldTOpenglCamera);
}
/**
@@ -114,15 +153,17 @@ public Pose toOpenGLCameraPose(TangoPoseData tangoPose) {
*/
public Pose toOpenGLPointCloudPose(TangoPoseData tangoPose) {
+ // We can't do this calculation until Extrinsics are set-up
+ if (mDeviceTDepthCamera == null) {
+ throw new RuntimeException("You must call setupExtrinsics first.");
+ }
+
//conversion matrix to put point cloud data in Rajwali/Opengl Coordinate system.
Matrix4 invertYandZMatrix = new Matrix4(new double[]{1.0f, 0.0f, 0.0f, 0.0f,
0.0f, -1.0f, 0.0f, 0.0f,
0.0f, 0.0f, -1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f });
- // We can't do this calculation until Extrinsics are set-up
- if (mDeviceTDepthCamera == null) {
- throw new RuntimeException("You must call setupExtrinsics first.");
- }
+
Matrix4 startServiceTdevice = tangoPoseToMatrix(tangoPose);
// Get device pose in OpenGL world frame
@@ -132,21 +173,12 @@ public Pose toOpenGLPointCloudPose(TangoPoseData tangoPose) {
Matrix4 openglWorldTOpenglCamera = openglTDevice.multiply(mDeviceTDepthCamera).
multiply(DEPTH_CAMERA_T_OPENGL_CAMERA).multiply(invertYandZMatrix);
- // Get translation and rotation components from the resulting transformation matrix
- Vector3 p = openglWorldTOpenglCamera.getTranslation();
- Quaternion q = new Quaternion();
- q.fromMatrix(openglWorldTOpenglCamera);
-
- // NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
- q.inverse();
-
- // Notify of the new Camera information
- return new Pose(p, q);
+ return matrixToPose(openglWorldTOpenglCamera);
}
/**
- * Given a TangoPoseData object, calculate the corresponding position and orientation for a
- * 3D object in the Rajawali world.
+ * Given a pose in start of service or area description frame, calculate the corresponding
+ * position and orientation for a 3D object in the Rajawali world.
*/
static public Pose toOpenGLPose(TangoPoseData tangoPose) {
Matrix4 start_service_T_device = tangoPoseToMatrix(tangoPose);
@@ -154,29 +186,57 @@ static public Pose toOpenGLPose(TangoPoseData tangoPose) {
// Get device pose in OpenGL world frame
Matrix4 opengl_world_T_device = OPENGL_T_TANGO_WORLD.clone().multiply(start_service_T_device);
- // Get translation and rotation components from the resulting transformation matrix
- Vector3 p = opengl_world_T_device.getTranslation();
- Quaternion q = new Quaternion();
- q.fromMatrix(opengl_world_T_device);
+ return matrixToPose(opengl_world_T_device);
+ }
- // NOTE: Rajawali Quaternions use a left-hand rotation around the axis convention
- q.inverse();
+ /**
+ * Given a point and a normal in depth camera frame and the device pose in start of service
+ * frame at the time the point and normal were measured, calculate a TangoPoseData object in
+ * Tango start of service frame.
+ *
+ * @param point Point in depth frame where the plane has been detected.
+ * @param normal Normal of the detected plane
+ * @param tangoPose Device pose with respect to start of service at the time the plane was
+ * fitted
+ */
+ public Pose planeFitToOpenGLPose(double[] point, double[] normal, TangoPoseData tangoPose) {
+ if (mDeviceTDepthCamera == null) {
+ throw new RuntimeException("You must call setupExtrinsics first");
+ }
- // Notify of the new Camera information
- return new Pose(p, q);
- }
+ Matrix4 startServiceTdevice = tangoPoseToMatrix(tangoPose);
+
+ // Calculate the UP vector in the Depth frame at the provided measurement pose
+ Vector3 depthUp = TANGO_WORLD_UP.clone();
+ startServiceTdevice.clone().multiply(mDeviceTDepthCamera).inverse().rotateVector(depthUp);
+
+ // Calculate the transform in depth frame corresponding to the plane fitting information
+ Matrix4 depthTplane = matrixFromPointNormalUp(point, normal, depthUp);
- public void setupExtrinsics(Matrix4 imutColorCamera, Matrix4 imuTDepthCamera, Matrix4 imuTDevice){
- mDeviceTDepthCamera = imuTDevice.clone().inverse().clone().multiply(imuTDepthCamera);
- setupExtrinsics(imutColorCamera,imuTDevice);
+ // Convert to OpenGL frame
+ Matrix4 openglWorldTplane = OPENGL_T_TANGO_WORLD.clone().multiply(startServiceTdevice)
+ .multiply(mDeviceTDepthCamera).multiply(depthTplane);
+
+ return matrixToPose(openglWorldTplane);
}
/**
- * Calculate and record the transformations between the Tango color camera and the device.
+ * Configure the scene pose calculator with the transformations between the color and depth
+ * cameras and the device.
+ * Note that this requires going through the IMU since the Tango service can't calculate
+ * the transform between the camera and the device directly.
*/
- public void setupExtrinsics(Matrix4 imutColorCamera, Matrix4 imuTDevice) {
- // Combine both to get the transform from the device pose to the RGB camera pose
- mDeviceTColorCamera = imuTDevice.clone().inverse().clone().multiply(imutColorCamera);
+ public void setupExtrinsics(TangoPoseData imuTDevicePose, TangoPoseData imuTColorCameraPose,
+ TangoPoseData imuTDepthCameraPose) {
+ Matrix4 deviceTImu = ScenePoseCalcuator.tangoPoseToMatrix(imuTDevicePose).inverse();
+ Matrix4 imutColorCamera = ScenePoseCalcuator.tangoPoseToMatrix(imuTColorCameraPose);
+ Matrix4 imuTDepthCamera = ScenePoseCalcuator.tangoPoseToMatrix(imuTDepthCameraPose);
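+ // device_T_camera = inverse(imu_T_device) * imu_T_camera, computed for both the depth and color cameras.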
+ mDeviceTDepthCamera = deviceTImu.clone().multiply(imuTDepthCamera);
+ mDeviceTColorCamera = deviceTImu.multiply(imutColorCamera);
+ }
+
+ private void throwExceptionIfNoExtrinsics() {
+
}
/**
@@ -205,4 +265,63 @@ public static Matrix4 calculateProjectionMatrix(int width, int height, double fx
near, far);
return new Matrix4(m);
}
+
+
+ /**
+ * Calculates a transformation matrix based on a point, a normal and the up gravity vector.
+ * The coordinate frame of the target transformation will be Z forward, X left, Y up.
+ */
+ private static Matrix4 matrixFromPointNormalUp(double[] point, double[] normal, Vector3 up) {
+ Vector3 zAxis = new Vector3(normal);
+ zAxis.normalize();
+ Vector3 xAxis = new Vector3();
+ xAxis.crossAndSet(up, zAxis);
+ xAxis.normalize();
+ Vector3 yAxis = new Vector3();
+ yAxis.crossAndSet(xAxis, zAxis);
+ yAxis.normalize();
+
+ double[] rot = new double[16];
+
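+ // The computed xAxis, yAxis and zAxis become the basis vectors of the plane frame's rotation.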
+ rot[Matrix4.M00] = xAxis.x;
+ rot[Matrix4.M10] = xAxis.y;
+ rot[Matrix4.M20] = xAxis.z;
+
+ rot[Matrix4.M01] = yAxis.x;
+ rot[Matrix4.M11] = yAxis.y;
+ rot[Matrix4.M21] = yAxis.z;
+
+ rot[Matrix4.M02] = zAxis.x;
+ rot[Matrix4.M12] = zAxis.y;
+ rot[Matrix4.M22] = zAxis.z;
+
+ rot[Matrix4.M33] = 1;
+
+ Matrix4 m = new Matrix4(rot);
+ m.setTranslation(point[0], point[1], point[2]);
+
+ return m;
+ }
+
+ /**
+ * Calculates the transform between the color camera at a time t0 and the depth camera at a
+ * time t1, given the device poses at t0 and t1.
+ */
+ public TangoPoseData calculateColorCameraTDepthWithTime(TangoPoseData ssTdeviceT0pose,
+ TangoPoseData ssTdeviceT1pose) {
+ if (mDeviceTDepthCamera == null) {
+ throw new RuntimeException("You must call setupExtrinsics first");
+ }
+
+ // Operation at hand:
+ // rgb_t0_T_depth_t1 = rgb_T_dev * dev_t0_T_ss * ss_T_dev_t1 * dev_T_depth
+
+ Matrix4 ssTdeviceT0 = tangoPoseToMatrix(ssTdeviceT0pose);
+ Matrix4 ssTdeviceT1 = tangoPoseToMatrix(ssTdeviceT1pose);
+
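+ // Reading right to left: depth frame at t1 -> device at t1 -> start of service -> device at t0 -> color camera at t0.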
+ Matrix4 rgbt0Tdeptht1 = mDeviceTColorCamera.clone().inverse().
+ multiply(ssTdeviceT0.inverse()).multiply(ssTdeviceT1).multiply(mDeviceTDepthCamera);
+
+ return matrixToTangoPose(rgbt0Tdeptht1);
+ }
}
diff --git a/TangoUtils/app/src/main/java/com/projecttango/rajawali/ar/TangoRajawaliRenderer.java b/TangoUtils/app/src/main/java/com/projecttango/rajawali/ar/TangoRajawaliRenderer.java
index 86a2adc5..b6798445 100644
--- a/TangoUtils/app/src/main/java/com/projecttango/rajawali/ar/TangoRajawaliRenderer.java
+++ b/TangoUtils/app/src/main/java/com/projecttango/rajawali/ar/TangoRajawaliRenderer.java
@@ -16,10 +16,13 @@
package com.projecttango.rajawali.ar;
import android.content.Context;
+import android.util.Log;
import com.google.atap.tangoservice.Tango;
import com.google.atap.tangoservice.TangoCameraIntrinsics;
import com.google.atap.tangoservice.TangoCoordinateFramePair;
+import com.google.atap.tangoservice.TangoErrorException;
+import com.google.atap.tangoservice.TangoInvalidException;
import com.google.atap.tangoservice.TangoPoseData;
import com.projecttango.rajawali.Pose;
import com.projecttango.rajawali.ScenePoseCalcuator;
@@ -31,6 +34,8 @@
import org.rajawali3d.primitives.ScreenQuad;
import org.rajawali3d.renderer.RajawaliRenderer;
+import javax.microedition.khronos.opengles.GL10;
+
/**
 * This is a specialization of RajawaliRenderer that makes it easy to build
* augmented reality applications.
@@ -70,11 +75,11 @@ public abstract class TangoRajawaliRenderer extends RajawaliRenderer {
private ScreenQuad mBackgroundQuad;
private boolean mIsCameraConfigured = false;
- protected ScenePoseCalcuator scenePoseCalcuator;
+ protected ScenePoseCalcuator mScenePoseCalcuator;
public TangoRajawaliRenderer(Context context) {
super(context);
- scenePoseCalcuator = new ScenePoseCalcuator();
+ mScenePoseCalcuator = new ScenePoseCalcuator();
}
/**
@@ -105,36 +110,54 @@ protected void onRender(long ellapsedRealtime, double deltaTime) {
synchronized (this) {
if (mTango != null) {
- if (mUpdatePending) {
- mLastRGBFrameTimestamp = updateTexture();
- mUpdatePending = false;
- }
- if (mLastRGBFrameTimestamp != mLastSceneCameraFrameTimestamp) {
- // We delay the camera set-up until now because if we do it earlier (i.e.: when the
- // camera is connected to the renderer) the Tango service may still not have the
- // necessary intrinsic and extrinsic transformation information available
- if (!mIsCameraConfigured) {
- configureCamera();
- mIsCameraConfigured = true;
+ try {
+ if (mUpdatePending) {
+ mLastRGBFrameTimestamp = updateTexture();
+ mUpdatePending = false;
}
+ if (mLastRGBFrameTimestamp != mLastSceneCameraFrameTimestamp) {
+ // We delay the camera set-up until now because if we do it earlier (i.e.: when the
+ // camera is connected to the renderer) the Tango service may still not have the
+ // necessary intrinsic and extrinsic transformation information available
+ if (!mIsCameraConfigured) {
+ configureCamera();
+ mIsCameraConfigured = true;
+ }
- // Calculate the device pose at the camera frame update time
- TangoPoseData lastFramePose =
- mTango.getPoseAtTime(mLastRGBFrameTimestamp, TANGO_WORLD_T_DEVICE);
- // Fall back to latest available time if for some reason that fails
- if (lastFramePose.statusCode != TangoPoseData.POSE_VALID) {
- lastFramePose = mTango.getPoseAtTime(0, TANGO_WORLD_T_DEVICE);
- }
- if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) {
- Pose sceneCameraPose = scenePoseCalcuator.toOpenGLCameraPose(lastFramePose);
- updateCameraPose(sceneCameraPose);
- mLastSceneCameraFrameTimestamp = mLastRGBFrameTimestamp;
+ // Calculate the device pose at the camera frame update time
+ TangoPoseData lastFramePose =
+ mTango.getPoseAtTime(mLastRGBFrameTimestamp, TANGO_WORLD_T_DEVICE);
+ // Fall back to latest available time if for some reason that fails
+ if (lastFramePose.statusCode != TangoPoseData.POSE_VALID) {
+ lastFramePose = mTango.getPoseAtTime(0, TANGO_WORLD_T_DEVICE);
+ } else {
+ Pose sceneCameraPose = mScenePoseCalcuator.toOpenGLCameraPose(lastFramePose);
+ updateCameraPose(sceneCameraPose);
+ mLastSceneCameraFrameTimestamp = mLastRGBFrameTimestamp;
+ }
}
+ } catch (TangoInvalidException ex) {
+ Log.e(TAG, "Error while updating texture!", ex);
+ } catch (TangoErrorException ex) {
+ Log.e(TAG, "Error while updating texture!", ex);
}
}
}
}
+ /**
+ * onRenderSurfaceSizeChanged() is called after onSurfaceCreated and whenever the nested view
+ * is reset or resized, including when the activity is paused and resumed. The texture
+ * connection and camera configuration are invalidated here because Rajawali resets the scene
+ * camera when the surface size changes.
+ */
+ @Override
+ public void onRenderSurfaceSizeChanged(GL10 gl, int width, int height) {
+ super.onRenderSurfaceSizeChanged(gl, width, height);
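+ // Invalidate the texture connection and camera configuration so they are set up again after the resize.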
+ mConnectedTextureId = -1;
+ mIsCameraConfigured = false;
+ }
+
/**
* Triggered whenever there is new information of the Tango position, with the provided
* data to update the Rajawali camera to match.
@@ -198,40 +221,18 @@ private void configureCamera() {
// Configure the Rajawali Scene camera projection to match the Tango camera intrinsic
TangoCameraIntrinsics intrinsics = mTango.getCameraIntrinsics(mCameraId);
- Matrix4 projectionMatrix = scenePoseCalcuator.calculateProjectionMatrix(
+ Matrix4 projectionMatrix = mScenePoseCalcuator.calculateProjectionMatrix(
intrinsics.width, intrinsics.height, intrinsics.fx, intrinsics.fy, intrinsics.cx,
intrinsics.cy);
getCurrentCamera().setProjectionMatrix(projectionMatrix);
+ }
- // Configure the scene pose calculator with the transformation between the selected
- // camera and the device.
- // Note that this requires going through the IMU since the Tango service can't calculate
- // the transform between the camera and the device directly
-
- // 1- Create Camera to IMU Transform
- TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
- switch (mCameraId) {
- case TangoCameraIntrinsics.TANGO_CAMERA_COLOR:
- framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
- break;
- case TangoCameraIntrinsics.TANGO_CAMERA_FISHEYE:
- framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_FISHEYE;
- break;
- default:
- throw new RuntimeException("Unsupported camera ID: " + mCameraId);
- }
- framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
- final TangoPoseData imuTrgbPose = mTango.getPoseAtTime(0.0, framePair);
- Matrix4 imuTrgb = ScenePoseCalcuator.tangoPoseToMatrix(imuTrgbPose);
-
- // 2- Create Device to IMU Transform and invert
- framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
- framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
- final TangoPoseData imuTdevicePose = mTango.getPoseAtTime(0.0, framePair);
- Matrix4 imuTdev = ScenePoseCalcuator.tangoPoseToMatrix(imuTdevicePose);
-
- // 3- Combine both to get the transform from the device pose to the RGB camera pose
- scenePoseCalcuator.setupExtrinsics(imuTrgb, imuTdev);
+ /**
+ * Set up the transforms between the device and its color and depth sensors.
+ */
+ public void setupExtrinsics(TangoPoseData imuTDevicePose, TangoPoseData imuTColorCameraPose,
+ TangoPoseData imuTDepthCameraPose) {
+ mScenePoseCalcuator.setupExtrinsics(imuTDevicePose, imuTColorCameraPose, imuTDepthCameraPose);
}
/**