android.hardware.SensorManager#getRotationMatrixFromVector() Source Code Examples

The following are example usages of android.hardware.SensorManager#getRotationMatrixFromVector(), drawn from open-source projects on GitHub.
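
Before the project samples, here is a minimal, self-contained sketch of the pattern most of them follow: register a TYPE_ROTATION_VECTOR sensor, convert its values to a rotation matrix with getRotationMatrixFromVector, then derive azimuth/pitch/roll with getOrientation. The class and field names are illustrative only and do not come from any of the projects below.

import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;

// Illustrative listener, not taken from any of the projects below.
public class RotationVectorSample implements SensorEventListener {
    private final float[] rotationMatrix = new float[9];
    private final float[] orientation = new float[3];

    public void start(Context context) {
        SensorManager sm = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        Sensor rotationVector = sm.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
        if (rotationVector != null) {
            sm.registerListener(this, rotationVector, SensorManager.SENSOR_DELAY_UI);
        }
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() != Sensor.TYPE_ROTATION_VECTOR) {
            return;
        }
        // Convert the rotation vector to a rotation matrix, then extract
        // azimuth, pitch and roll (all in radians) from that matrix.
        SensorManager.getRotationMatrixFromVector(rotationMatrix, event.values);
        SensorManager.getOrientation(rotationMatrix, orientation);
        double azimuthDegrees = Math.toDegrees(orientation[0]);
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) { }
}

Note that getRotationMatrixFromVector accepts either a 9-element or a 16-element output array; the 16-element form can be handed directly to OpenGL, as several of the examples below do.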

Example 1 - Project: open-location-code, File: LocationProvider.java
@Override
public void onSensorChanged(SensorEvent event) {
    float rotationMatrix[] = new float[16];
    SensorManager.getRotationMatrixFromVector(rotationMatrix, event.values);
    float[] orientationValues = new float[3];
    readDisplayRotation();
    SensorManager.remapCoordinateSystem(rotationMatrix, mAxisX, mAxisY, rotationMatrix);
    SensorManager.getOrientation(rotationMatrix, orientationValues);
    double azimuth = Math.toDegrees(orientationValues[0]);
    // Azimuth values are now -180-180 (N=0), but once added to the location object
    // they become 0-360 (N=0).
    @SuppressLint("UseValueOf") Float newBearing = new Float(azimuth);
    if (mBearing == null || Math.abs(mBearing - newBearing) > MIN_BEARING_DIFF) {
        mBearing = newBearing;
        if (mCurrentBestLocation != null) {
            mCurrentBestLocation.setBearing(mBearing);
        }
        mLocationCallback.handleNewBearing(mBearing);
    }
}
 
Example 2 - Project: 365browser, File: DeviceSensors.java
public void convertRotationVectorToAngles(float[] rotationVector, double[] angles) {
    if (rotationVector.length > 4) {
        // On some Samsung devices SensorManager.getRotationMatrixFromVector
        // appears to throw an exception if rotation vector has length > 4.
        // For the purposes of this class the first 4 values of the
        // rotation vector are sufficient (see crbug.com/335298 for details).
        System.arraycopy(rotationVector, 0, mTruncatedRotationVector, 0, 4);
        SensorManager.getRotationMatrixFromVector(
                mDeviceRotationMatrix, mTruncatedRotationVector);
    } else {
        SensorManager.getRotationMatrixFromVector(mDeviceRotationMatrix, rotationVector);
    }
    computeDeviceOrientationFromRotationMatrix(mDeviceRotationMatrix, angles);
    for (int i = 0; i < 3; i++) {
        angles[i] = Math.toDegrees(angles[i]);
    }
}
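
The length check above works around a quirk seen on some Samsung devices, where the reported rotation vector has more than four elements and getRotationMatrixFromVector may throw. A standalone helper capturing the same workaround might look like the sketch below; the method and field names are made up for illustration and are not part of the 365browser project.

// Hypothetical helper; assumes a cached 4-element buffer so no allocation happens per event.
private final float[] mTruncatedVector = new float[4];

private void getRotationMatrixSafely(float[] outMatrix, float[] rotationVector) {
    if (rotationVector.length > 4) {
        // Some devices report a 5-element rotation vector; only the first
        // four components are needed by getRotationMatrixFromVector.
        System.arraycopy(rotationVector, 0, mTruncatedVector, 0, 4);
        SensorManager.getRotationMatrixFromVector(outMatrix, mTruncatedVector);
    } else {
        SensorManager.getRotationMatrixFromVector(outMatrix, rotationVector);
    }
}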
 
Example 3 - Project: Pocket-Plays-for-Twitch, File: StreamActivity.java
protected void update(float[] vectors) {
	int worldAxisX = SensorManager.AXIS_X;
	int worldAxisZ = SensorManager.AXIS_Z;

	float[] rotationMatrix = new float[9];
	float[] adjustedRotationMatrix = new float[9];
	float[] orientation = new float[3];

	SensorManager.getRotationMatrixFromVector(rotationMatrix, vectors);
	SensorManager.remapCoordinateSystem(rotationMatrix, worldAxisX, worldAxisZ, adjustedRotationMatrix);
	SensorManager.getOrientation(adjustedRotationMatrix, orientation);

	float roll = orientation[2] * FROM_RADS_TO_DEGS;

	if (roll > -45 && roll < 45) {
		setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
		Log.d(LOG_TAG, "Requesting undefined");
	}
	Log.d(LOG_TAG, "Roll: " + roll);
}
 
Example 4 - Project: Myna, File: Utils.java
public static void calculateWorldAcce(SensorData sd){
    float[] Rotate = new float[16];
    float[] I = new float[16];
    float[] currOrientation = new float[3];
    if((int)(sd.game_rotation_vector[0]) == 0
            && (int)(sd.game_rotation_vector[1]) == 0
            && (int)(sd.game_rotation_vector[2]) == 0){
        SensorManager.getRotationMatrix(Rotate, I, sd.accelerate, sd.magnetic);
    }else{
        SensorManager.getRotationMatrixFromVector(Rotate, sd.game_rotation_vector);
    }
    SensorManager.getOrientation(Rotate, currOrientation);
    System.arraycopy(currOrientation, 0, sd.orientation, 0, 3);

    float[] relativeAcc = new float[4];
    float[] earthAcc = new float[4];
    float[] inv = new float[16];
    System.arraycopy(sd.accelerate, 0, relativeAcc, 0, 3);
    relativeAcc[3] = 0;
    android.opengl.Matrix.invertM(inv, 0, Rotate, 0);
    android.opengl.Matrix.multiplyMV(earthAcc, 0, inv, 0, relativeAcc, 0);
    System.arraycopy(earthAcc, 0, sd.world_accelerometer, 0, 3);
}
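
Since the matrices produced by getRotationMatrix and getRotationMatrixFromVector are pure rotations, their transpose equals their inverse. A cheaper variant of the last three lines above could therefore use transposeM instead of invertM; this sketch reuses the same variable names but is not part of the Myna project.

// Assumed simplification: for a pure rotation matrix, transpose == inverse.
float[] deviceToWorld = new float[16];
android.opengl.Matrix.transposeM(deviceToWorld, 0, Rotate, 0);
android.opengl.Matrix.multiplyMV(earthAcc, 0, deviceToWorld, 0, relativeAcc, 0);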
 
Example 5 - Project: SpeedHud, File: OrientationManager.java
@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
        // Get the current heading from the sensor, then notify the listeners of the
        // change.
        SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
        SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_X,
                SensorManager.AXIS_Z, mRotationMatrix);
        SensorManager.getOrientation(mRotationMatrix, mOrientation);

        // Store the pitch (used to display a message indicating that the user's head
        // angle is too steep to produce reliable results).
        mPitch = (float) Math.toDegrees(mOrientation[1]);

        // Convert the heading (which is relative to magnetic north) to one that is
        // relative to true north, using the user's current location to compute this.
        float magneticHeading = (float) Math.toDegrees(mOrientation[0]);
        mHeading = MathUtils.mod(computeTrueNorth(magneticHeading), 360.0f)
                - ARM_DISPLACEMENT_DEGREES;

        notifyOrientationChanged();
    }
}
 
Example 6
@Override
public void onSensorChanged(SensorEvent event) {
    if( event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR ){
        // calculate the rotation matrix
        SensorManager.getRotationMatrixFromVector(rMat, event.values);
        // get the azimuth value (orientation[0]) in degree

        newAzimuth = (int) ((((( Math.toDegrees( SensorManager.getOrientation( rMat, orientation )[0] ) + 360 ) % 360) -
                      ( Math.toDegrees( SensorManager.getOrientation( rMat, orientation )[2] ))) +360) % 360);

        // don't react to changes smaller than the filter value
        if (Math.abs(mAzimuth - newAzimuth) < mFilter) {
            return;
        }

        getReactApplicationContext()
                .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                .emit("headingUpdated", (int) newAzimuth);

        mAzimuth = newAzimuth;
    }
}
 
Example 7 - Project: android, File: CompassView.java
@Override
public void onSensorChanged(SensorEvent e) {
    loadSensorData(e);

    if (mUseRotation) {
        SensorManager.getRotationMatrixFromVector(mRotation, mRotationVector);
    } else if (mUseAccelerometer && mUseMagnetic) {
        SensorManager.getRotationMatrix(mRotation, null, mGravity, mGeomagnetic);
    }

    configureDeviceAngle();
    SensorManager.getOrientation(mRotationMapped, mOrientation);

    float bearing = (float) Math.toDegrees(mOrientation[0]);
    if (bearing < 0) bearing += 360;

    mDirection = mAzimuth - bearing;
    mDirection = (mDirection < 0) ? mDirection + 360 : mDirection;
}
 
Example 8
private float[] calculateOrientation(float[] values) {
  float[] rotationMatrix = new float[9];
  float[] remappedMatrix = new float[9];
  float[] orientation = new float[3];

  // Determine the rotation matrix
  SensorManager.getRotationMatrixFromVector(rotationMatrix, values);

  // Remap the coordinates based on the natural device orientation.
  int x_axis = SensorManager.AXIS_X;
  int y_axis = SensorManager.AXIS_Y;
  switch (mScreenRotation) {
    case (Surface.ROTATION_90):
      x_axis = SensorManager.AXIS_Y;
      y_axis = SensorManager.AXIS_MINUS_X;
      break;
    case (Surface.ROTATION_180):
      y_axis = SensorManager.AXIS_MINUS_Y;
      break;
    case (Surface.ROTATION_270):
      x_axis = SensorManager.AXIS_MINUS_Y;
      y_axis = SensorManager.AXIS_X;
      break;
    default: break;
  }

  SensorManager.remapCoordinateSystem(rotationMatrix,
    x_axis, y_axis,
    remappedMatrix);

  // Obtain the current, corrected orientation.
  SensorManager.getOrientation(remappedMatrix, orientation);

  // Convert from Radians to Degrees.
  values[0] = (float) Math.toDegrees(orientation[0]);
  values[1] = (float) Math.toDegrees(orientation[1]);
  values[2] = (float) Math.toDegrees(orientation[2]);
  return values;
}
 
Example 9
private void listing16_9() {
  final SensorEventListener mySensorEventListener = new SensorEventListener() {
    // Listing 16-10: Calculating the device orientation using the rotation vector
    public void onSensorChanged(SensorEvent sensorEvent) {
      float[] rotationMatrix = new float[9];
      float[] orientation = new float[3];

      // Convert the result Vector to a Rotation Matrix.
      SensorManager.getRotationMatrixFromVector(rotationMatrix,
        sensorEvent.values);

      // Extract the orientation from the Rotation Matrix.
      SensorManager.getOrientation(rotationMatrix, orientation);
      Log.d(TAG, "Yaw: " + orientation[0]); // Yaw
      Log.d(TAG, "Pitch: " + orientation[1]); // Pitch
      Log.d(TAG, "Roll: " + orientation[2]); // Roll
    }

    public void onAccuracyChanged(Sensor sensor, int accuracy) { }
  };

  // Listing 16-9: Monitoring an accelerometer sensor
  SensorManager sm = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
  int sensorType = Sensor.TYPE_ACCELEROMETER;
  sm.registerListener(mySensorEventListener,
    sm.getDefaultSensor(sensorType),
    SensorManager.SENSOR_DELAY_NORMAL);
}
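
Note that Listing 16-9 registers Sensor.TYPE_ACCELEROMETER, while the listener from Listing 16-10 passes event.values to getRotationMatrixFromVector, which expects rotation-vector values. If you adapt this snippet for the method shown on this page, you would presumably register the rotation vector sensor instead; a minimal adjustment (an assumption on our part, not the book's original listing) would be:

// Assumed adaptation: register the rotation vector sensor for Listing 16-10's listener.
sm.registerListener(mySensorEventListener,
    sm.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR),
    SensorManager.SENSOR_DELAY_NORMAL);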
 
Example 10 - Project: PanoramaGL, File: OrientationView.java
@Override
public void onSensorChanged(SensorEvent event) {
  SensorManager.getRotationMatrixFromVector(phoneInWorldSpaceMatrix, event.values);
  if (startFromSensorTransformation == null) {
    // Android's hardware uses radians, but OpenGL uses degrees. Android uses
    // [yaw, pitch, roll] for the order of elements in the orientation array.
    float[] orientationRadians =
        SensorManager.getOrientation(phoneInWorldSpaceMatrix, new float[3]);
    startFromSensorTransformation = new float[3];
    for (int i = 0; i < 3; ++i) {
      startFromSensorTransformation[i] = (float) Math.toDegrees(orientationRadians[i]);
    }
  }
}
 
Example 11 - Project: Android-Orientation-Sensor, File: Orientation.java
public void gyroFunction(SensorEvent event) {

        if (accMagOrientation == null)
            return;


        if(initState) {
            float[] initMatrix = new float[9];
            initMatrix = getRotationMatrixFromOrientation(accMagOrientation);
            float[] test = new float[3];
            SensorManager.getOrientation(initMatrix, test);
            gyroMatrix = Matrix3x3.multiplication(gyroMatrix, initMatrix);
            initState = false;
        }

        // copy the new gyro values into the gyro array
        // convert the raw gyro data into a rotation vector
        float[] deltaVector = new float[4];
        if(timestamp != 0) {
            final float dT = (event.timestamp - timestamp) * NS2S;
            System.arraycopy(event.values, 0, gyro, 0, 3);
            getRotationVectorFromGyro(gyro, deltaVector, dT / 2.0f);
        }

        timestamp = event.timestamp;


        float[] deltaMatrix = new float[9];
        SensorManager.getRotationMatrixFromVector(deltaMatrix, deltaVector);

        gyroMatrix = Matrix3x3.multiplication(gyroMatrix, deltaMatrix);

        SensorManager.getOrientation(gyroMatrix, gyroOrientation);
    }
 
Example 12 - Project: codeexamples-android, File: RotationVectorDemo.java
public void onSensorChanged(SensorEvent event) {
    // we received a sensor event. it is a good practice to check
    // that we received the proper event
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
        // convert the rotation-vector to a 4x4 matrix. the matrix
        // is interpreted by Open GL as the inverse of the
        // rotation-vector, which is what we want.
        SensorManager.getRotationMatrixFromVector(
                mRotationMatrix , event.values);
    }
}
 
Example 13 - Project: BleSensorTag, File: SensorFusionEngine.java
public void onGyroDataUpdate(float[] gyro) {
    // initialisation of the gyroscope based rotation matrix
    if (!isGyroInitialized) {
        float[] initMatrix = getRotationMatrixFromOrientation(accMagOrientation);
        gyroMatrix = matrixMultiplication(gyroMatrix, initMatrix);
        isGyroInitialized = true;
    }

    // copy the new gyro values into the gyro array
    // convert the raw gyro data into a rotation vector
    float[] deltaVector = new float[4];
    final long currentTimestamp = System.nanoTime();
    if(timestamp != 0) {
        final float dT = (currentTimestamp - timestamp) * NS2S;
        System.arraycopy(gyro, 0, this.gyro, 0, 3);
        getRotationVectorFromGyro(this.gyro, deltaVector, dT / 2.0f);
    }

    // measurement done, save current time for next interval
    timestamp = currentTimestamp;

    // convert rotation vector into rotation matrix
    float[] deltaMatrix = new float[9];
    SensorManager.getRotationMatrixFromVector(deltaMatrix, deltaVector);

    // apply the new rotation interval on the gyroscope based rotation matrix
    gyroMatrix = matrixMultiplication(gyroMatrix, deltaMatrix);

    // get the gyroscope based orientation from the rotation matrix
    SensorManager.getOrientation(gyroMatrix, gyroOrientation);
}
 
Example 14 - Project: cloud-cup-android, File: TurnGameActivity.java
@Override
public void onSensorChanged(SensorEvent event) {
    SensorManager.getRotationMatrixFromVector(mRotationMatrix , event.values);
    SensorManager.getOrientation(mRotationMatrix, mOrientation);

    float zAngle = mOrientation[0];

    //Log.d(LOG_TAG, "Turn z: " + zAngle);

    if(originalAngle == 0) {
        originalAngle = zAngle;
    }

    if( !halfTurn && Math.abs(zAngle - backtoRange(originalAngle + Math.PI)) < ANGLE_SENSIBILITY ) {
        halfTurn = true;
        demiTurns++;
        sendTurnValues();
    }

    if( halfTurn && Math.abs(zAngle - originalAngle) < ANGLE_SENSIBILITY) {
        halfTurn = false;
        demiTurns++;
        ((Vibrator) getSystemService(Context.VIBRATOR_SERVICE)).vibrate(100);
        sendTurnValues();
    }

}
 
Example 15
@Override
public void onSensorChanged(SensorEvent event) {

    // we received a sensor event. it is a good practice to check
    // that we received the proper event
    if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {

        // This timestep's delta rotation to be multiplied by the current rotation
        // after computing it from the gyro sample data.
        if (timestamp != 0) {
            final float dT = (event.timestamp - timestamp) * NS2S;
            // Axis of the rotation sample, not normalized yet.
            float axisX = event.values[0];
            float axisY = event.values[1];
            float axisZ = event.values[2];

            // Calculate the angular speed of the sample
            gyroscopeRotationVelocity = Math.sqrt(axisX * axisX + axisY * axisY + axisZ * axisZ);

            // Normalize the rotation vector if it's big enough to get the axis
            if (gyroscopeRotationVelocity > EPSILON) {
                axisX /= gyroscopeRotationVelocity;
                axisY /= gyroscopeRotationVelocity;
                axisZ /= gyroscopeRotationVelocity;
            }

            // Integrate around this axis with the angular speed by the timestep
            // in order to get a delta rotation from this sample over the timestep
            // We will convert this axis-angle representation of the delta rotation
            // into a quaternion before turning it into the rotation matrix.
            double thetaOverTwo = gyroscopeRotationVelocity * dT / 2.0f;
            double sinThetaOverTwo = Math.sin(thetaOverTwo);
            double cosThetaOverTwo = Math.cos(thetaOverTwo);
            deltaQuaternion.setX((float) (sinThetaOverTwo * axisX));
            deltaQuaternion.setY((float) (sinThetaOverTwo * axisY));
            deltaQuaternion.setZ((float) (sinThetaOverTwo * axisZ));
            deltaQuaternion.setW(-(float) cosThetaOverTwo);

            // Matrix rendering in CubeRenderer does not seem to have this problem.
            synchronized (syncToken) {
                // Move current gyro orientation if gyroscope should be used
                deltaQuaternion.multiplyByQuat(currentOrientationQuaternion, currentOrientationQuaternion);
            }

            Quaternion correctedQuat = currentOrientationQuaternion.clone();
            // We inverted w in the deltaQuaternion, because currentOrientationQuaternion required it.
            // Before converting it back to matrix representation, we need to revert this process
            correctedQuat.w(-correctedQuat.w());

            synchronized (syncToken) {
                // Set the rotation matrix as well to have both representations
                SensorManager.getRotationMatrixFromVector(currentOrientationRotationMatrix.matrix,
                        correctedQuat.ToArray());
            }
        }
        timestamp = event.timestamp;
    }
}
 
Example 16 - Project: VideoOS-Android-SDK, File: SensorInterpreter.java
@SuppressWarnings("SuspiciousNameCombination")
public float[] interpretSensorEvent(@NonNull Context context, @Nullable SensorEvent event) {
    if (event == null) {
        return null;
    }

    float[] rotationVector = getRotationVectorFromSensorEvent(event);

    if (!mTargeted) {
        setTargetVector(rotationVector);
        return null;
    }

    SensorManager.getRotationMatrixFromVector(mRotationMatrix, rotationVector);

    int rotation = ((WindowManager) context
            .getSystemService(Context.WINDOW_SERVICE))
            .getDefaultDisplay()
            .getRotation();

    if (rotation == Surface.ROTATION_0) {
        SensorManager.getAngleChange(mTiltVector, mRotationMatrix, mTargetMatrix);
    } else {
        switch (rotation) {
            case Surface.ROTATION_90:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_Y, AXIS_MINUS_X, mOrientedRotationMatrix);
                break;

            case Surface.ROTATION_180:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_MINUS_X, AXIS_MINUS_Y, mOrientedRotationMatrix);
                break;

            case Surface.ROTATION_270:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_MINUS_Y, AXIS_X, mOrientedRotationMatrix);
                break;
        }

        SensorManager.getAngleChange(mTiltVector, mOrientedRotationMatrix, mTargetMatrix);
    }

    for (int i = 0; i < mTiltVector.length; i++) {
        mTiltVector[i] /= Math.PI;

        mTiltVector[i] *= mTiltSensitivity;

        if (mTiltVector[i] > 1) {
            mTiltVector[i] = 1f;
        } else if (mTiltVector[i] < -1) {
            mTiltVector[i] = -1f;
        }
    }

    return mTiltVector;
}
 
Example 17 - Project: VideoOS-Android-SDK, File: SensorInterpreter.java
void setTargetVector(float[] values) {
    SensorManager.getRotationMatrixFromVector(mTargetMatrix, values);
    mTargeted = true;
}
 
Example 18 - Project: ShaderEditor, File: GyroscopeListener.java
@Override
public void onSensorChanged(SensorEvent event) {
	if (last > 0) {
		final float dT = (event.timestamp - last) * NS2S;

		// axis of the rotation sample, not normalized yet
		float axisX = event.values[0];
		float axisY = event.values[1];
		float axisZ = event.values[2];

		// calculate the angular speed of the sample
		float omegaMagnitude = (float) Math.sqrt(
				axisX * axisX + axisY * axisY + axisZ * axisZ);

		// normalize the rotation vector
		if (omegaMagnitude > EPSILON) {
			axisX /= omegaMagnitude;
			axisY /= omegaMagnitude;
			axisZ /= omegaMagnitude;
		}

		// integrate around this axis with the angular speed by the
		// timestep in order to get a delta rotation from this sample
		// over the timestep; then convert this axis-angle representation
		// of the delta rotation into a quaternion before turning it
		// into the rotation matrix
		float thetaOverTwo = omegaMagnitude * dT / 2.0f;
		float sinThetaOverTwo = (float) Math.sin(thetaOverTwo);
		float cosThetaOverTwo = (float) Math.cos(thetaOverTwo);
		deltaRotationVector[0] = sinThetaOverTwo * axisX;
		deltaRotationVector[1] = sinThetaOverTwo * axisY;
		deltaRotationVector[2] = sinThetaOverTwo * axisZ;
		deltaRotationVector[3] = cosThetaOverTwo;

		SensorManager.getRotationMatrixFromVector(
				deltaRotationMatrix,
				deltaRotationVector);

		float r0 = rotation[0];
		float r1 = rotation[1];
		float r2 = rotation[2];
		rotation[0] = r0 * deltaRotationMatrix[0] +
				r1 * deltaRotationMatrix[1] +
				r2 * deltaRotationMatrix[2];
		rotation[1] = r0 * deltaRotationMatrix[3] +
				r1 * deltaRotationMatrix[4] +
				r2 * deltaRotationMatrix[5];
		rotation[2] = r0 * deltaRotationMatrix[6] +
				r1 * deltaRotationMatrix[7] +
				r2 * deltaRotationMatrix[8];
	}

	last = event.timestamp;
}
 
Example 19 - Project: Pano360, File: SensorUtils.java
public static void getOrientation(SensorEvent event,float[] output){
    //sensorRotationVectorToMatrix(event,oTmp);
    SensorManager.getRotationMatrixFromVector(oTmp, event.values);
    SensorManager.getOrientation(oTmp,output);
}
 
Example 20 - Project: motion, File: SensorInterpreter.java
/**
 * Sets the target direction used for angle deltas to determine tilt.
 *
 * @param values a rotation vector (presumably from a ROTATION_VECTOR sensor)
 */
protected void setTargetVector(float[] values) {
    SensorManager.getRotationMatrixFromVector(mTargetMatrix, values);
    mTargeted = true;
}