How to use onSensorChanged sensor data in combination with OpenGL
I have written a test suite to find out how to calculate the rotation angles from the data you get in SensorEventListener.onSensorChanged(). I really hope you can complete my solution to help people who run into the same problems I did. Here is the code; I think you will understand it after reading it.
Feel free to change it. The main idea was to implement several methods that send the orientation angles to the OpenGL view, or to any other target that needs them.
Methods 1 to 4 are working: they send the rotationMatrix directly to the OpenGL view. All the other methods are not working or are buggy, and I hope someone knows how to get them right. I think the best method would be method 5 if it worked, because it would be the easiest to understand, but I'm not sure how efficient it is. The complete code isn't optimized, so I recommend not using it as-is in your project.
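Stripped down to its core, the working path (methods 1 to 4) is roughly the following minimal sketch, with the buffering and the landscape remapping from the full code left out:
// in onSensorChanged(): after copying the latest accelerometer and
// magnetometer values, build the rotation matrix from them
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData, magnetData);
// in onDrawFrame(): multiply the rotation matrix onto the modelview matrix
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glMultMatrixf(rotationMatrix, 0);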
Here is the full code:
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import static javax.microedition.khronos.opengles.GL10.*;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.Renderer;
import android.os.Bundle;
import android.util.Log;
import android.view.WindowManager;
/**
* This class provides a basic demonstration of how to use the
* {@link android.hardware.SensorManager SensorManager} API to draw a 3D
* compass.
*/
public class SensorToOpenGlTests extends Activity implements Renderer,
SensorEventListener {
private static final boolean TRY_TRANSPOSED_VERSION = false;
/*
* MODUS overview:
*
* 1 - unbuffered data directly transferred from the rotation matrix to the
* modelview matrix
*
* 2 - buffered version of 1 where both acceleration and magnetometer are
* buffered
*
* 3 - buffered version of 1 where only magnetometer is buffered
*
* 4 - buffered version of 1 where only acceleration is buffered
*
* 5 - uses the orientation sensor and sets the angles used to rotate the
* camera with glRotatef()
*
* 6 - uses the rotation matrix to calculate the angles
*
* 7 to 12 - every possible way the rotationMatrix could be constructed
* in SensorManager.getRotationMatrix (see
* http://www.songho.ca/opengl/gl_anglestoaxes.html#anglestoaxes for all
* possibilities)
*/
private static int MODUS = 2;
private GLSurfaceView openglView;
private FloatBuffer vertexBuffer;
private ByteBuffer indexBuffer;
private FloatBuffer colorBuffer;
private SensorManager mSensorManager;
private float[] rotationMatrix = new float[16];
private float[] accelGData = new float[3];
private float[] bufferedAccelGData = new float[3];
private float[] magnetData = new float[3];
private float[] bufferedMagnetData = new float[3];
private float[] orientationData = new float[3];
// private float[] mI = new float[16];
private float[] resultingAngles = new float[3];
private int mCount;
final static float rad2deg = (float) (180.0f / Math.PI);
private boolean mirrorOnBlueAxis = false;
private boolean landscape;
public SensorToOpenGlTests() {
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
openglView = new GLSurfaceView(this);
openglView.setRenderer(this);
setContentView(openglView);
}
@Override
protected void onResume() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity loses focus
super.onResume();
openglView.onResume();
if (((WindowManager) getSystemService(WINDOW_SERVICE))
.getDefaultDisplay().getOrientation() == 1) {
landscape = true;
} else {
landscape = false;
}
mSensorManager.registerListener(this, mSensorManager
.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_GAME);
mSensorManager.registerListener(this, mSensorManager
.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
SensorManager.SENSOR_DELAY_GAME);
mSensorManager.registerListener(this, mSensorManager
.getDefaultSensor(Sensor.TYPE_ORIENTATION),
SensorManager.SENSOR_DELAY_GAME);
}
@Override
protected void onPause() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity loses focus
super.onPause();
openglView.onPause();
mSensorManager.unregisterListener(this);
}
public int[] getConfigSpec() {
// We want a depth buffer, don't care about the
// details of the color buffer.
int[] configSpec = { EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_NONE };
return configSpec;
}
public void onDrawFrame(GL10 gl) {
// clear screen and color buffer:
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
// set target matrix to modelview matrix:
gl.glMatrixMode(GL10.GL_MODELVIEW);
// init modelview matrix:
gl.glLoadIdentity();
// apply the sensor rotation depending on the selected mode:
if ((MODUS == 1) || (MODUS == 2) || (MODUS == 3) || (MODUS == 4)) {
if (landscape) {
// in landscape mode first remap the rotationMatrix before using
// it with glMultMatrixf:
float[] result = new float[16];
SensorManager.remapCoordinateSystem(rotationMatrix,
SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X,
result);
gl.glMultMatrixf(result, 0);
} else {
gl.glMultMatrixf(rotationMatrix, 0);
}
} else {
// in all other modes do the rotation by hand:
gl.glRotatef(resultingAngles[1], 1, 0, 0);
gl.glRotatef(resultingAngles[2], 0, 1, 0);
gl.glRotatef(resultingAngles[0], 0, 0, 1);
if (mirrorOnBlueAxis) {
// this is needed for mode 6 to work
gl.glScalef(1, 1, -1);
}
}
// move the axes to simulate augmented-reality behaviour:
gl.glTranslatef(0, 2, 0);
// draw the 3 axes on the screen:
gl.glVertexPointer(3, GL_FLOAT, 0, vertexBuffer);
gl.glColorPointer(4, GL_FLOAT, 0, colorBuffer);
gl.glDrawElements(GL_LINES, 6, GL_UNSIGNED_BYTE, indexBuffer);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
float r = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-r, r, -1, 1, 1, 10);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glDisable(GL10.GL_DITHER);
gl.glClearColor(1, 1, 1, 1);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glEnable(GL10.GL_DEPTH_TEST);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
// load the 3 axes (x = red, y = green, z = blue) and their colors:
float vertices[] = { 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1 };
float colors[] = { 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1 };
byte indices[] = { 0, 1, 0, 2, 0, 3 };
ByteBuffer vbb;
vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
vbb = ByteBuffer.allocateDirect(colors.length * 4);
vbb.order(ByteOrder.nativeOrder());
colorBuffer = vbb.asFloatBuffer();
colorBuffer.put(colors);
colorBuffer.position(0);
indexBuffer = ByteBuffer.allocateDirect(indices.length);
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
public void onSensorChanged(SensorEvent event) {
// load the new values:
loadNewSensorData(event);
if (MODUS == 1) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
}
if (MODUS == 2) {
rootMeanSquareBuffer(bufferedAccelGData, accelGData);
rootMeanSquareBuffer(bufferedMagnetData, magnetData);
SensorManager.getRotationMatrix(rotationMatrix, null,
bufferedAccelGData, bufferedMagnetData);
}
if (MODUS == 3) {
rootMeanSquareBuffer(bufferedMagnetData, magnetData);
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
bufferedMagnetData);
}
if (MODUS == 4) {
rootMeanSquareBuffer(bufferedAccelGData, accelGData);
SensorManager.getRotationMatrix(rotationMatrix, null,
bufferedAccelGData, magnetData);
}
if (MODUS == 5) {
// this mode uses the sensor data received from the orientation
// sensor
resultingAngles = orientationData.clone();
if ((-90 > resultingAngles[1]) || (resultingAngles[1] > 90)) {
resultingAngles[1] = orientationData[0];
resultingAngles[2] = orientationData[1];
resultingAngles[0] = orientationData[2];
}
}
if (MODUS == 6) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
final float[] anglesInRadians = new float[3];
SensorManager.getOrientation(rotationMatrix, anglesInRadians);
if ((-90 < anglesInRadians[2] * rad2deg)
&& (anglesInRadians[2] * rad2deg < 90)) {
// device camera is looking on the floor
// this hemisphere is working fine
mirrorOnBlueAxis = false;
resultingAngles[0] = anglesInRadians[0] * rad2deg;
resultingAngles[1] = anglesInRadians[1] * rad2deg;
resultingAngles[2] = anglesInRadians[2] * -rad2deg;
} else {
mirrorOnBlueAxis = true;
// device camera is looking in the sky
// this hemisphere is mirrored at the blue axis
resultingAngles[0] = (anglesInRadians[0] * rad2deg);
resultingAngles[1] = (anglesInRadians[1] * rad2deg);
resultingAngles[2] = (anglesInRadians[2] * rad2deg);
}
}
if (MODUS == 7) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in x y z
* order Rx*Ry*Rz
*/
resultingAngles[2] = (float) (Math.asin(rotationMatrix[2]));
final float cosB = (float) Math.cos(resultingAngles[2]);
resultingAngles[2] = resultingAngles[2] * rad2deg;
resultingAngles[0] = -(float) (Math.acos(rotationMatrix[0] / cosB))
* rad2deg;
resultingAngles[1] = (float) (Math.acos(rotationMatrix[10] / cosB))
* rad2deg;
}
if (MODUS == 8) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in z y x
*/
resultingAngles[2] = (float) (Math.asin(-rotationMatrix[8]));
final float cosB = (float) Math.cos(resultingAngles[2]);
resultingAngles[2] = resultingAngles[2] * rad2deg;
resultingAngles[1] = (float) (Math.acos(rotationMatrix[9] / cosB))
* rad2deg;
resultingAngles[0] = (float) (Math.asin(rotationMatrix[4] / cosB))
* rad2deg;
}
if (MODUS == 9) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in z x y
*
* note z axis looks good at this one
*/
resultingAngles[1] = (float) (Math.asin(rotationMatrix[9]));
final float minusCosA = -(float) Math.cos(resultingAngles[1]);
resultingAngles[1] = resultingAngles[1] * rad2deg;
resultingAngles[2] = (float) (Math.asin(rotationMatrix[8]
/ minusCosA))
* rad2deg;
resultingAngles[0] = (float) (Math.asin(rotationMatrix[1]
/ minusCosA))
* rad2deg;
}
if (MODUS == 10) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in y x z
*/
resultingAngles[1] = (float) (Math.asin(-rotationMatrix[6]));
final float cosA = (float) Math.cos(resultingAngles[1]);
resultingAngles[1] = resultingAngles[1] * rad2deg;
resultingAngles[2] = (float) (Math.asin(rotationMatrix[2] / cosA))
* rad2deg;
resultingAngles[0] = (float) (Math.acos(rotationMatrix[5] / cosA))
* rad2deg;
}
if (MODUS == 11) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in y z x
*/
resultingAngles[0] = (float) (Math.asin(rotationMatrix[4]));
final float cosC = (float) Math.cos(resultingAngles[0]);
resultingAngles[0] = resultingAngles[0] * rad2deg;
resultingAngles[2] = (float) (Math.acos(rotationMatrix[0] / cosC))
* rad2deg;
resultingAngles[1] = (float) (Math.acos(rotationMatrix[5] / cosC))
* rad2deg;
}
if (MODUS == 12) {
SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
magnetData);
rotationMatrix = transpose(rotationMatrix);
/*
* this assumes that the rotation matrices are multiplied in x z y
*/
resultingAngles[0] = (float) (Math.asin(-rotationMatrix[1]));
final float cosC = (float) Math.cos(resultingAngles[0]);
resultingAngles[0] = resultingAngles[0] * rad2deg;
resultingAngles[2] = (float) (Math.acos(rotationMatrix[0] / cosC))
* rad2deg;
resultingAngles[1] = (float) (Math.acos(rotationMatrix[5] / cosC))
* rad2deg;
}
logOutput();
}
/**
 * Transposes the rotation part of the matrix (for a pure rotation matrix,
 * transposing is the same as inverting) so that it can be used with OpenGL.
 *
 * @param source
 * @return the transposed matrix
 */
private float[] transpose(float[] source) {
final float[] result = source.clone();
if (TRY_TRANSPOSED_VERSION) {
result[1] = source[4];
result[2] = source[8];
result[4] = source[1];
result[6] = source[9];
result[8] = source[2];
result[9] = source[6];
}
// the other values in the matrix are not relevant for rotations
return result;
}
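/*
 * Smooths the newest sensor values into the target buffer using a weighted
 * root mean square (the old buffered value is weighted 20:1 against the new
 * value). Both arrays are shifted by a constant offset before squaring and
 * shifted back afterwards, presumably so that negative readings keep their
 * sign through the square/square-root step.
 */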
private void rootMeanSquareBuffer(float[] target, float[] values) {
final float amplification = 200.0f;
float buffer = 20.0f;
target[0] += amplification;
target[1] += amplification;
target[2] += amplification;
values[0] += amplification;
values[1] += amplification;
values[2] += amplification;
target[0] = (float) (Math
.sqrt((target[0] * target[0] * buffer + values[0] * values[0])
/ (1 + buffer)));
target[1] = (float) (Math
.sqrt((target[1] * target[1] * buffer + values[1] * values[1])
/ (1 + buffer)));
target[2] = (float) (Math
.sqrt((target[2] * target[2] * buffer + values[2] * values[2])
/ (1 + buffer)));
target[0] -= amplification;
target[1] -= amplification;
target[2] -= amplification;
values[0] -= amplification;
values[1] -= amplification;
values[2] -= amplification;
}
private void loadNewSensorData(SensorEvent event) {
final int type = event.sensor.getType();
if (type == Sensor.TYPE_ACCELEROMETER) {
accelGData = event.values.clone();
}
if (type == Sensor.TYPE_MAGNETIC_FIELD) {
magnetData = event.values.clone();
}
if (type == Sensor.TYPE_ORIENTATION) {
orientationData = event.values.clone();
}
}
private void logOutput() {
if (mCount++ > 30) {
mCount = 0;
Log.d("Compass", "yaw0: " + (int) (resultingAngles[0])
+ " pitch1: " + (int) (resultingAngles[1]) + " roll2: "
+ (int) (resultingAngles[2]));
}
}
}