/*
- * Copyright (c) 2014 Samsung Electronics Co., Ltd.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
// INTERNAL INCLUDES
#include <dali/internal/update/gestures/pan-gesture-profiling.h>
+#include <dali/integration-api/debug.h>
namespace Dali
{
namespace
{
-const int MAX_GESTURE_AGE = 50; ///< maximum age of a gesture before disallowing its use in algorithm TODO: Possibly make this configurable.
-const float ACCELERATION_THRESHOLD = 0.1f; ///< minimum pan velocity change to trigger dynamic change of prediction amount
-const unsigned int DEFAULT_PREDICTION_INTERPOLATION = 5; ///< how much to interpolate pan position and displacement from last vsync time (in milliseconds)
-const unsigned int DEFAULT_MAX_PREDICTION_INTERPOLATION = 32; ///< the upper bound of the range to clamp the prediction interpolation
-const unsigned int DEFAULT_MIN_PREDICTION_INTERPOLATION = 0; ///< the lower bound of the range to clamp the prediction interpolation
-const unsigned int DEFAULT_PREDICTION_INTERPOLATION_ADJUSTMENT = 2; ///< the amount of prediction interpolation to adjust (in milliseconds) each time when pan velocity changes
-const float DEFAULT_SMOOTHING_AMOUNT = 0.25f; ///< how much to interpolate pan position and displacement from last vsync time
+
+// TODO: Experimental - for changing in code only:
+const bool TEST_TUNE_ENABLE_OVERSHOOT_PROTECTION = false;
+
+// Internal defaults:
+const int MAX_GESTURE_AGE = 200; ///< maximum age of a gesture before disallowing its use in algorithm TODO: Possibly make this configurable.
+const float ACCELERATION_THRESHOLD = 0.1f; ///< minimum pan velocity change to trigger dynamic change of prediction amount.
+const float OUTPUT_TIME_DIFFERENCE = ( 1000.0f / 60.0f ); ///< This is used to optionally override actual times if they make results worse.
+const float ACCELERATION_SMOOTHING = 0.44f; ///< Smooths acceleration changes from one frame to another.
+const float ACCELERATION_CAP = 0.0004f; ///< Limits acceleration changes from one frame to another.
+
+// Defaults for Environment Variables:
+
+// Prediction Mode 1:
+const unsigned int DEFAULT_MAX_PREDICTION_AMOUNT = 32; ///< the upper bound of the range to clamp the prediction interpolation.
+const unsigned int DEFAULT_MIN_PREDICTION_AMOUNT = 0; ///< the lower bound of the range to clamp the prediction interpolation.
+const unsigned int DEFAULT_PREDICTION_AMOUNT_ADJUSTMENT = 2; ///< the amount of prediction interpolation to adjust (in milliseconds) each time when pan velocity changes.
+
+// Prediction Mode 2:
+const bool DEFAULT_USE_ACTUAL_TIMES = false; ///< Disable to optionally override actual times if they make results worse.
+const int DEFAULT_INTERPOLATION_TIME_RANGE = 255; ///< Time into past history (ms) to use points to interpolate the first point.
+const bool DEFAULT_SCALAR_ONLY_PREDICTION_ENABLED = false; ///< If enabled, prediction is done using velocity alone (no integration or acceleration).
+const bool DEFAULT_TWO_POINT_PREDICTION_ENABLED = true; ///< If enabled, a second interpolated point is predicted and combined with the first to get more stable values.
+const int DEFAULT_TWO_POINT_PAST_INTERPOLATE_TIME = 42; ///< The target time in the past to generate the second interpolated point.
+const float DEFAULT_TWO_POINT_VELOCITY_BIAS = 0.35f; ///< The ratio of first and second interpolated points to use for velocity. 0.0f = 100% of first point. 1.0f = 100% of second point.
+const float DEFAULT_TWO_POINT_ACCELERATION_BIAS = 0.10f; ///< The ratio of first and second interpolated points to use for acceleration. 0.0f = 100% of first point. 1.0f = 100% of second point.
+const int DEFAULT_MULTITAP_SMOOTHING_RANGE = 34; ///< The range in time (ms) of points in the history to smooth the final output against.
+
+// Prediction Modes 1 & 2.
+const unsigned int DEFAULT_PREDICTION_AMOUNT[2] = { 5, 57 }; ///< how much to interpolate pan position and displacement from last vsync time (in milliseconds). Indexed by mode: [0] = mode 1, [1] = mode 2.
+const float DEFAULT_SMOOTHING_AMOUNT[2] = { 0.25f, 0.23f }; ///< how much to smooth final result from last vsync time. Indexed by mode: [0] = mode 1, [1] = mode 2.
+
} // unnamed namespace
const PanGesture::PredictionMode PanGesture::DEFAULT_PREDICTION_MODE = PanGesture::PREDICTION_NONE;
-const int PanGesture::NUM_PREDICTION_MODES = PanGesture::PREDICTION_1 + 1;
+// The mode count is derived from the last enumerator, so it must track the newest mode (PREDICTION_2).
+const int PanGesture::NUM_PREDICTION_MODES = PanGesture::PREDICTION_2 + 1;
const PanGesture::SmoothingMode PanGesture::DEFAULT_SMOOTHING_MODE = PanGesture::SMOOTHING_LAST_VALUE;
-const int PanGesture::NUM_SMOOTHING_MODES = PanGesture::SMOOTHING_LAST_VALUE + 1;
+// Likewise the smoothing mode count must track the newest mode (SMOOTHING_MULTI_TAP).
+const int PanGesture::NUM_SMOOTHING_MODES = PanGesture::SMOOTHING_MULTI_TAP + 1;
PanGesture* PanGesture::New()
{
void PanGesture::AddGesture( const Dali::PanGesture& gesture )
{
+ Dali::Mutex::ScopedLock lock( mMutex );
mGestures[ mWritePosition ] = gesture;
// Update our write position.
}
}
-void PanGesture::PredictiveAlgorithm1(int eventsThisFrame, PanInfo& gestureOut, PanInfoHistory& panHistory, unsigned int lastVSyncTime, unsigned int nextVSyncTime)
+void PanGesture::PredictionMode1(int eventsThisFrame, PanInfo& gestureOut, PanInfoHistory& panHistory, unsigned int lastVSyncTime, unsigned int nextVSyncTime)
{
RemoveOldHistory(panHistory, lastVSyncTime, MAX_GESTURE_AGE, 0);
size_t panHistorySize = panHistory.size();
gestureOut.time += interpolationTime;
}
-void PanGesture::SmoothingAlgorithm1(bool justStarted, PanInfo& gestureOut, unsigned int lastVSyncTime)
+// Blends gesture's position up to half-way towards lastGesture's position (scaled by
+// blendValue: 1.0f = no positional change, 0.0f = full half-way blend), then re-derives
+// displacement and velocity relative to lastGesture.
+void PanGesture::BlendPoints( PanInfo& gesture, PanInfo& lastGesture, float blendValue )
{
- if( justStarted )
- {
- gestureOut.screen.displacement = Vector2::ZERO;
- gestureOut.screen.velocity = Vector2::ZERO;
- gestureOut.local.displacement = Vector2::ZERO;
- gestureOut.local.velocity = Vector2::ZERO;
- }
- else
- {
- gestureOut.screen.position -= (gestureOut.screen.position - mLastGesture.screen.position) * 0.5f * (1.0f - mSmoothingAmount);
- gestureOut.local.position -= (gestureOut.local.position - mLastGesture.local.position) * 0.5f * (1.0f - mSmoothingAmount);
- // make current displacement relative to previous update-frame now.
- gestureOut.screen.displacement = gestureOut.screen.position - mLastGesture.screen.position;
- gestureOut.local.displacement = gestureOut.local.position - mLastGesture.local.position;
- // calculate velocity relative to previous update-frame
- float timeDiff( gestureOut.time - mLastGesture.time );
- gestureOut.screen.velocity = gestureOut.screen.displacement / timeDiff;
- gestureOut.local.velocity = gestureOut.local.displacement / timeDiff;
- }
-}
-
-void PanGesture::SmoothingAlgorithm2(bool justStarted, PanInfo& gestureOut, unsigned int lastVSyncTime)
-{
- // push back result
- mPredictionHistory.push_back(gestureOut);
-
- // now smooth current pan event
- PanInfoHistoryConstIter endIter = mPredictionHistory.end() - 1;
- PanInfoHistoryIter iter = mPredictionHistory.begin();
-
- float distanceMod = 1.0f;
- float weight = 0.8f;
- while( iter != endIter )
- {
- PanInfo currentGesture = *iter;
- float newDistanceMod = currentGesture.screen.displacement.Length() / gestureOut.screen.displacement.Length();
- distanceMod = ((distanceMod * weight) + (newDistanceMod * (1.0f - weight)));
- weight -= 0.15f;
- ++iter;
- }
- gestureOut.screen.position -= gestureOut.screen.displacement;
- gestureOut.local.position -= gestureOut.local.displacement;
- gestureOut.screen.displacement *= distanceMod;
- gestureOut.local.displacement *= distanceMod;
- gestureOut.screen.position += gestureOut.screen.displacement;
- gestureOut.local.position += gestureOut.local.displacement;
+ gesture.screen.position -= ( gesture.screen.position - lastGesture.screen.position ) * 0.5f * ( 1.0f - blendValue );
+ gesture.local.position -= ( gesture.local.position - lastGesture.local.position ) * 0.5f * ( 1.0f - blendValue );
+ // Make current displacement relative to previous update-frame now.
+ gesture.screen.displacement = gesture.screen.position - lastGesture.screen.position;
+ gesture.local.displacement = gesture.local.position - lastGesture.local.position;
+ // Calculate velocity relative to previous update-frame
+ float timeDifference( gesture.time - lastGesture.time );
+ // NOTE(review): assumes gesture.time != lastGesture.time; a zero difference would divide by zero — confirm callers guarantee this.
+ gesture.screen.velocity = gesture.screen.displacement / timeDifference;
+ gesture.local.velocity = gesture.local.displacement / timeDifference;
}
bool PanGesture::ReadGestures( FrameGestureInfo& info, unsigned int currentTimestamp )
bool PanGesture::ReadAndResampleGestures( FrameGestureInfo& info, unsigned int currentTimestamp )
{
PanInfo lastReadGesture;
+ Dali::Mutex::ScopedLock lock( mMutex );
while( mReadPosition != mWritePosition )
{
// Copy the gesture first
bool PanGesture::UpdateProperties( unsigned int lastVSyncTime, unsigned int nextVSyncTime )
{
+ if( mPredictionMode == PREDICTION_2 )
+ {
+ // TODO: Have the two prediction modes share more behavior so some parts of mode 2 can
+ // be used with mode 1 etc. Needs code moving and more importantly testing.
+ return NewAlgorithm( lastVSyncTime, nextVSyncTime );
+ }
+
if( !mInGesture )
{
// clear current pan history
}
PanInfo frameGesture = frameInfo.frameGesture;
- mLastUnmodifiedGesture = frameGesture;
+ PanInfo unmodifiedGesture = frameGesture;
// Process input data.
mInGesture |= frameInfo.justStarted;
}
// Perform prediction.
- switch( mPredictionMode )
+ if( mPredictionMode == PREDICTION_1 )
{
- case PREDICTION_NONE:
+ // Dynamically change the prediction amount according to the pan velocity acceleration.
+ if( !frameInfo.justStarted )
{
- break;
- }
- case PREDICTION_1:
- {
- // Dynamically change the prediction amount according to the pan velocity acceleration.
- if( !frameInfo.justStarted )
+ if( frameInfo.eventsThisFrame <= 1 )
{
- if( frameInfo.eventsThisFrame <= 1 )
- {
- frameInfo.acceleration = frameGesture.screen.velocity.Length() - mLastUnmodifiedGesture.screen.velocity.Length();
- }
+ frameInfo.acceleration = frameGesture.screen.velocity.Length() - mLastUnmodifiedGesture.screen.velocity.Length();
+ }
- // Ignore tiny velocity fluctuation to avoid unnecessary prediction amount change
- if( fabsf( frameInfo.acceleration ) > ACCELERATION_THRESHOLD )
+ // Ignore tiny velocity fluctuation to avoid unnecessary prediction amount change
+ if( fabsf( frameInfo.acceleration ) > ACCELERATION_THRESHOLD )
+ {
+ mCurrentPredictionAmount += mPredictionAmountAdjustment * ( frameInfo.acceleration > Math::MACHINE_EPSILON_0 ? 1.0f : -1.0f );
+ if( mCurrentPredictionAmount > mMaxPredictionAmount + mPredictionAmountAdjustment ) // Guard against unsigned int overflow
{
- mCurrentPredictionAmount += mPredictionAmountAdjustment * ( frameInfo.acceleration > Math::MACHINE_EPSILON_0 ? 1.0f : -1.0f );
- if( mCurrentPredictionAmount > mMaxPredictionAmount + mPredictionAmountAdjustment ) // Guard against unsigned int overflow
- {
- mCurrentPredictionAmount = 0;
- }
+ mCurrentPredictionAmount = 0;
}
}
- else
+ }
+ else
+ {
+ if( !mPredictionAmountOverridden )
{
- mCurrentPredictionAmount = mPredictionAmount; // Reset the prediction amount for each new gesture
+ // If the prediction amount has not been modified, default to the correct amount for this algorithm.
+ mPredictionAmount = DEFAULT_PREDICTION_AMOUNT[0];
}
+ mCurrentPredictionAmount = mPredictionAmount; // Reset the prediction amount for each new gesture
+ }
- mCurrentPredictionAmount = std::max( mMinPredictionAmount, std::min( mCurrentPredictionAmount, mMaxPredictionAmount ) );
+ mCurrentPredictionAmount = std::max( mMinPredictionAmount, std::min( mCurrentPredictionAmount, mMaxPredictionAmount ) );
+
+ // Calculate the delta of positions before the prediction
+ Vector2 deltaPosition = frameGesture.screen.position - mLastUnmodifiedGesture.screen.position;
+
+ // Make latest gesture equal to current gesture before interpolation
+ PredictionMode1( frameInfo.eventsThisFrame, frameGesture, mPanHistory, lastVSyncTime, nextVSyncTime );
- // Calculate the delta of positions before the prediction
- Vector2 deltaPosition = frameGesture.screen.position - mLastUnmodifiedGesture.screen.position;
+ // Calculate the delta of positions after the prediction.
+ Vector2 deltaPredictedPosition = frameGesture.screen.position - mLastGesture.screen.position;
- // Make latest gesture equal to current gesture before interpolation
- PredictiveAlgorithm1( frameInfo.eventsThisFrame, frameGesture, mPanHistory, lastVSyncTime, nextVSyncTime );
+ // If the change in the prediction has a different sign than the change in the actual position,
+ // there is overshot (i.e. the current prediction is too large). Return the previous prediction
+ // to give the user's finger a chance to catch up with where we have panned to.
+ bool overshotXAxis = false;
+ bool overshotYAxis = false;
+ if( (deltaPosition.x > Math::MACHINE_EPSILON_0 && deltaPredictedPosition.x < Math::MACHINE_EPSILON_0 )
+ || (deltaPosition.x < Math::MACHINE_EPSILON_0 && deltaPredictedPosition.x > Math::MACHINE_EPSILON_0 ) )
+ {
+ overshotXAxis = true;
+ frameGesture.screen.position.x = mLastGesture.screen.position.x;
+ }
- // Calculate the delta of positions after the prediction.
- Vector2 deltaPredictedPosition = frameGesture.screen.position - mLastGesture.screen.position;
+ if( (deltaPosition.y > Math::MACHINE_EPSILON_0 && deltaPredictedPosition.y < Math::MACHINE_EPSILON_0 )
+ || (deltaPosition.y < Math::MACHINE_EPSILON_0 && deltaPredictedPosition.y > Math::MACHINE_EPSILON_0 ) )
+ {
+ overshotYAxis = true;
+ frameGesture.screen.position.y = mLastGesture.screen.position.y;
+ }
- // If the change in the prediction has a different sign than the change in the actual position,
- // there is overshot (i.e. the current prediction is too large). Return the previous prediction
- // to give the user's finger a chance to catch up with where we have panned to.
- bool overshotXAxis = false;
- bool overshotYAxis = false;
- if( (deltaPosition.x > Math::MACHINE_EPSILON_0 && deltaPredictedPosition.x < Math::MACHINE_EPSILON_0 )
- || (deltaPosition.x < Math::MACHINE_EPSILON_0 && deltaPredictedPosition.x > Math::MACHINE_EPSILON_0 ) )
+ // If there is overshot in one axis, reduce the possible overshot in the other axis,
+ // and reduce the prediction amount so that it doesn't overshoot as easily next time.
+ if(overshotXAxis || overshotYAxis)
+ {
+ mCurrentPredictionAmount -= mPredictionAmountAdjustment;
+ if( mCurrentPredictionAmount > mMaxPredictionAmount + mPredictionAmountAdjustment ) // Guard against unsigned int overflow
{
- overshotXAxis = true;
- frameGesture.screen.position.x = mLastGesture.screen.position.x;
+ mCurrentPredictionAmount = 0;
}
+ mCurrentPredictionAmount = std::max( mMinPredictionAmount, std::min( mCurrentPredictionAmount, mMaxPredictionAmount ) );
- if( (deltaPosition.y > Math::MACHINE_EPSILON_0 && deltaPredictedPosition.y < Math::MACHINE_EPSILON_0 )
- || (deltaPosition.y < Math::MACHINE_EPSILON_0 && deltaPredictedPosition.y > Math::MACHINE_EPSILON_0 ) )
+ if( overshotXAxis && !overshotYAxis )
{
- overshotYAxis = true;
- frameGesture.screen.position.y = mLastGesture.screen.position.y;
+ frameGesture.screen.position.y = ( mLastGesture.screen.position.y + frameGesture.screen.position.y ) * 0.5f;
}
- // If there is overshot in one axis, reduce the possible overshot in the other axis,
- // and reduce the prediction amount so that it doesn't overshoot as easily next time.
- if(overshotXAxis || overshotYAxis)
+ if( overshotYAxis && !overshotXAxis )
{
- mCurrentPredictionAmount -= mPredictionAmountAdjustment;
- if( mCurrentPredictionAmount > mMaxPredictionAmount + mPredictionAmountAdjustment ) // Guard against unsigned int overflow
- {
- mCurrentPredictionAmount = 0;
- }
- mCurrentPredictionAmount = std::max( mMinPredictionAmount, std::min( mCurrentPredictionAmount, mMaxPredictionAmount ) );
-
- if( overshotXAxis && !overshotYAxis )
- {
- frameGesture.screen.position.y = ( mLastGesture.screen.position.y + frameGesture.screen.position.y ) * 0.5f;
- }
-
- if( overshotYAxis && !overshotXAxis )
- {
- frameGesture.screen.position.x = ( mLastGesture.screen.position.x + frameGesture.screen.position.x ) * 0.5f;
- }
+ frameGesture.screen.position.x = ( mLastGesture.screen.position.x + frameGesture.screen.position.x ) * 0.5f;
}
-
- updateProperties = true;
- break;
}
+
+ updateProperties = true;
}
// Perform smoothing.
switch( mSmoothingMode )
{
case SMOOTHING_NONE:
+ case SMOOTHING_MULTI_TAP:
{
// No smoothing
+ // TODO: Old algorithm to be able to use multitap smoothing.
break;
}
case SMOOTHING_LAST_VALUE:
{
- SmoothingAlgorithm1( frameInfo.justStarted, frameGesture, lastVSyncTime );
+ if( !frameInfo.justStarted )
+ {
+ if( !mSmoothingAmountOverridden )
+ {
+ // If the smoothing amount has not been modified, default to the correct amount for this algorithm.
+ mSmoothingAmount = DEFAULT_SMOOTHING_AMOUNT[0];
+ }
+ BlendPoints( frameGesture, mLastGesture, mSmoothingAmount );
+ }
break;
}
}
}
mLastGesture = frameGesture;
+ mLastUnmodifiedGesture = unmodifiedGesture;
mInGesture &= ~frameInfo.justFinished;
if( mProfiling && frameInfo.justFinished )
void PanGesture::SetPredictionAmount(unsigned int amount)
{
mPredictionAmount = amount;
+ // Flag the amount as user-set so the per-mode defaults no longer overwrite it.
+ mPredictionAmountOverridden = true;
}
void PanGesture::SetMaximumPredictionAmount(unsigned int amount)
void PanGesture::SetSmoothingAmount(float amount)
{
mSmoothingAmount = amount;
+ // Flag the amount as user-set so the per-mode defaults no longer overwrite it.
+ mSmoothingAmountOverridden = true;
+}
+
+// The following setters expose prediction-mode-2 tuning values (normally configured
+// via environment variables; see the DEFAULT_* constants for their meanings).
+
+// Sets whether actual event time differences are used (see GetDivisibleTimeDifference).
+void PanGesture::SetUseActualTimes( bool value )
+{
+ mUseActualTimes = value;
+}
+
+// Sets the time range (ms) into past history used to interpolate the first point.
+void PanGesture::SetInterpolationTimeRange( int value )
+{
+ mInterpolationTimeRange = value;
+}
+
+// When enabled, prediction uses velocity alone (no integration or acceleration).
+void PanGesture::SetScalarOnlyPredictionEnabled( bool value )
+{
+ mScalarOnlyPredictionEnabled = value;
+}
+
+// When enabled, a second interpolated point is predicted and combined with the first.
+void PanGesture::SetTwoPointPredictionEnabled( bool value )
+{
+ mTwoPointPredictionEnabled = value;
+}
+
+// Sets the target time in the past used to generate the second interpolated point.
+void PanGesture::SetTwoPointInterpolatePastTime( int value )
+{
+ mTwoPointPastInterpolateTime = value;
+}
+
+// Ratio of first/second interpolated points used for velocity (0.0f = first, 1.0f = second).
+void PanGesture::SetTwoPointVelocityBias( float value )
+{
+ mTwoPointVelocityBias = value;
+}
+
+// Ratio of first/second interpolated points used for acceleration (0.0f = first, 1.0f = second).
+void PanGesture::SetTwoPointAccelerationBias( float value )
+{
+ mTwoPointAccelerationBias = value;
+}
+
+// Sets the time range (ms) of history points used to smooth the final output.
+void PanGesture::SetMultitapSmoothingRange( int value )
+{
+ mMultiTapSmoothingRange = value;
}
void PanGesture::EnableProfiling()
mReadPosition( 0 ),
mNotAtTarget( false ),
mInGesture( false ),
+ mPredictionAmountOverridden( false ),
+ mSmoothingAmountOverridden( false ),
+ mProfiling( NULL ),
+
+ // Set environment variable defaults:
mPredictionMode( DEFAULT_PREDICTION_MODE ),
- mPredictionAmount( DEFAULT_PREDICTION_INTERPOLATION ),
- mCurrentPredictionAmount( DEFAULT_PREDICTION_INTERPOLATION ),
- mMaxPredictionAmount( DEFAULT_MAX_PREDICTION_INTERPOLATION ),
- mMinPredictionAmount( DEFAULT_MIN_PREDICTION_INTERPOLATION ),
- mPredictionAmountAdjustment( DEFAULT_PREDICTION_INTERPOLATION_ADJUSTMENT ),
+ mPredictionAmount( DEFAULT_PREDICTION_AMOUNT[0] ),
+ mCurrentPredictionAmount( DEFAULT_PREDICTION_AMOUNT[0] ),
+ mMaxPredictionAmount( DEFAULT_MAX_PREDICTION_AMOUNT ),
+ mMinPredictionAmount( DEFAULT_MIN_PREDICTION_AMOUNT ),
+ mPredictionAmountAdjustment( DEFAULT_PREDICTION_AMOUNT_ADJUSTMENT ),
mSmoothingMode( DEFAULT_SMOOTHING_MODE ),
- mSmoothingAmount( DEFAULT_SMOOTHING_AMOUNT ),
- mProfiling( NULL )
+ mSmoothingAmount( DEFAULT_SMOOTHING_AMOUNT[0] ),
+ mUseActualTimes( DEFAULT_USE_ACTUAL_TIMES ),
+ mInterpolationTimeRange( DEFAULT_INTERPOLATION_TIME_RANGE ),
+ mScalarOnlyPredictionEnabled( DEFAULT_SCALAR_ONLY_PREDICTION_ENABLED ),
+ mTwoPointPredictionEnabled( DEFAULT_TWO_POINT_PREDICTION_ENABLED ),
+ mTwoPointPastInterpolateTime( DEFAULT_TWO_POINT_PAST_INTERPOLATE_TIME ),
+ mTwoPointVelocityBias( DEFAULT_TWO_POINT_VELOCITY_BIAS ),
+ mTwoPointAccelerationBias( DEFAULT_TWO_POINT_ACCELERATION_BIAS ),
+ mMultiTapSmoothingRange( DEFAULT_MULTITAP_SMOOTHING_RANGE )
+{
+
+}
+
+// Prediction mode 2 related code and functions follow:
+
+// Drains all pending events from the circular mGestures buffer into the linear
+// mReadGestures buffer (under mMutex) and returns how many events were read.
+unsigned int PanGesture::ReadFrameEvents()
+{
+ unsigned int eventsThisFrame;
+ // Copy the events into a linear buffer while holding the mutex.
+ // This is so the lock is not held while any processing is done.
+ Dali::Mutex::ScopedLock lock( mMutex );
+ for( eventsThisFrame = 0; mReadPosition != mWritePosition; ++eventsThisFrame )
+ {
+ mReadGestures[ eventsThisFrame ] = mGestures[ mReadPosition ];
+ ++mReadPosition;
+ mReadPosition %= PAN_GESTURE_HISTORY;
+ }
+ return eventsThisFrame;
+}
+
+// TODO: eventsThisFrame parameter can be removed if we use a smarter container.
+// Converts the eventsThisFrame events in mReadGestures into a single update-rate
+// gesture, written to rateConvertedGesture (positions/velocities/displacements are
+// averaged when more than one event arrived). A Started event resets all gesture
+// state and sets justStarted; Finished/Cancelled set justFinished. If no events
+// arrived while mid-gesture, an event is estimated from mLastFrameReadGesture using
+// halved acceleration. The result is appended to mPanHistory.
+// Returns false when nothing could be produced (no events and not safely mid-gesture).
+bool PanGesture::InputRateConversion( PanInfo& rateConvertedGesture, unsigned int eventsThisFrame,
+ unsigned int currentFrameTime, unsigned int lastFrameTime, bool& justStarted, bool& justFinished )
+{
+ // TODO: Lots of variables on the stack. Needs optimizing.
+ PanInfo readGesture;
+ PanInfo firstReadGesture;
+ unsigned int eventsKeptThisFrame = 0;
+
+ for( unsigned int readPosition = 0; readPosition < eventsThisFrame; ++readPosition )
+ {
+ // Copy the gesture first
+ readGesture = mReadGestures[ readPosition ];
+
+ if( mProfiling )
+ {
+ mProfiling->mRawData.push_back( PanGestureProfiling::Position( readGesture.time, readGesture.screen.position,
+ readGesture.screen.displacement, readGesture.screen.velocity, readGesture.state ) );
+ }
+
+ if( readGesture.state == Gesture::Started )
+ {
+ // Clear pan data.
+ mPanHistory.clear();
+ mPredictionHistory.clear();
+ mLastAcceleration.local = Vector2::ZERO;
+ mLastAcceleration.screen = Vector2::ZERO;
+ mLastInterpolatedAcceleration.local = Vector2::ZERO;
+ mLastInterpolatedAcceleration.screen = Vector2::ZERO;
+ mLastInitialAcceleration.local = Vector2::ZERO;
+ mLastInitialAcceleration.screen = Vector2::ZERO;
+ PanInfo startInfo;
+ mLastGesture = startInfo;
+ mLastSecondInterpolatedPoint = startInfo;
+ mLastPredictedPoint = startInfo;
+ mLastFrameReadGesture = startInfo;
+ rateConvertedGesture = startInfo;
+ // NOTE(review): firstReadGesture is only written, never read, within this function.
+ firstReadGesture = readGesture;
+ eventsKeptThisFrame = 0;
+ mNotAtTarget = false;
+ justFinished = false;
+ justStarted = true;
+ mInGesture = true;
+
+ if( !mPredictionAmountOverridden )
+ {
+ // If the prediction amount has not been modified, default to the correct amount for this algorithm.
+ mPredictionAmount = DEFAULT_PREDICTION_AMOUNT[1];
+ }
+ mCurrentPredictionAmount = mPredictionAmount;
+ }
+ else
+ {
+ justFinished |= ( readGesture.state == Gesture::Finished || readGesture.state == Gesture::Cancelled );
+ }
+
+ rateConvertedGesture.screen.position += readGesture.screen.position;
+ rateConvertedGesture.local.position += readGesture.local.position;
+ rateConvertedGesture.screen.velocity += readGesture.screen.velocity;
+ rateConvertedGesture.local.velocity += readGesture.local.velocity;
+ rateConvertedGesture.screen.displacement += readGesture.screen.displacement;
+ rateConvertedGesture.local.displacement += readGesture.local.displacement;
+
+ ++eventsKeptThisFrame;
+ }
+
+ bool storeGesture = false;
+ if( eventsKeptThisFrame > 0 )
+ {
+ // Some events were read this frame.
+ if( eventsKeptThisFrame > 1 )
+ {
+ float eventDivisor( eventsKeptThisFrame );
+ rateConvertedGesture.screen.position /= eventDivisor;
+ rateConvertedGesture.local.position /= eventDivisor;
+ rateConvertedGesture.screen.velocity /= eventDivisor;
+ rateConvertedGesture.local.velocity /= eventDivisor;
+ rateConvertedGesture.screen.displacement /= eventDivisor;
+ rateConvertedGesture.local.displacement /= eventDivisor;
+
+ mTargetGesture = readGesture;
+ mNotAtTarget = true;
+ }
+ else
+ {
+ mNotAtTarget = false;
+ }
+
+ rateConvertedGesture.time = currentFrameTime;
+ storeGesture = true;
+ }
+ else
+ {
+ // We did not get any event this frame.
+ // If we just started (or aren't in a gesture), exit.
+ if( !mInGesture || justStarted )
+ {
+ // We cannot guess what the event could be as we have no other events to base the guess from.
+ return false;
+ }
+
+ // As we are currently in a gesture, we can estimate an event.
+ readGesture = mLastFrameReadGesture;
+ readGesture.time = currentFrameTime;
+
+ // Take the last event, halve the acceleration, and use that.
+ const float accelerationDegrade = 2.0f;
+ // NOTE(review): '/=' mutates mLastAcceleration as a side effect, so repeated
+ // event-less frames keep halving it — appears intentional, confirm.
+ Vector2 degradedAccelerationLocal( mLastAcceleration.local /= accelerationDegrade );
+ Vector2 degradedAccelerationScreen( mLastAcceleration.screen /= accelerationDegrade );
+
+ float outputTimeGranularity( GetDivisibleTimeDifference( currentFrameTime, lastFrameTime, 1.0f, OUTPUT_TIME_DIFFERENCE ) );
+
+ readGesture.local.velocity = degradedAccelerationLocal * outputTimeGranularity;
+ readGesture.local.displacement = readGesture.local.velocity * outputTimeGranularity;
+ readGesture.local.position = mLastFrameReadGesture.local.position + readGesture.local.displacement;
+ readGesture.screen.velocity = degradedAccelerationScreen * outputTimeGranularity;
+ readGesture.screen.displacement = readGesture.screen.velocity * outputTimeGranularity;
+ readGesture.screen.position = mLastFrameReadGesture.screen.position + readGesture.screen.displacement;
+
+ rateConvertedGesture = readGesture;
+ eventsKeptThisFrame = 1;
+ storeGesture = true;
+ }
+
+ if( eventsKeptThisFrame > 0 )
+ {
+ // Store last read gesture.
+ readGesture.time = currentFrameTime;
+ mLastFrameReadGesture = readGesture;
+
+ if( eventsKeptThisFrame > 2 )
+ {
+ DALI_LOG_WARNING( "Got events this frame:%d (more than 2 will compromise result)\n", eventsKeptThisFrame );
+ }
+ }
+
+ if( storeGesture )
+ {
+ // Store final converted result.
+ mPanHistory.push_back( rateConvertedGesture );
+ }
+ return true;
+}
+
+// Generates an interpolated point (outPoint) at targetTime, as an inverse-time-delta
+// weighted average of the history points within +/- range ms of targetTime. Points
+// older than (targetTime - range) can optionally be erased from the history as we go.
+// Acceleration is derived from consecutive point pairs and smoothed against the
+// previous value (ACCELERATION_SMOOTHING); it is returned via the in/out
+// 'acceleration' parameter.
+// Returns true if at least one history point was in range (i.e. a point was generated).
+bool PanGesture::InterpolatePoint( PanInfoHistory& history, unsigned int currentTime, unsigned int targetTime, unsigned int range,
+ PanInfo& outPoint, RelativeVectors& acceleration, int outputTimeGranularity, bool eraseUnused )
+{
+ unsigned int discardInputBufferTime = targetTime + range;
+ // NOTE(review): unsigned subtraction — if range > targetTime this wraps to a huge
+ // value; presumably timestamps are large enough in practice, confirm.
+ unsigned int maxHistoryTime = targetTime - range;
+ unsigned int tapsUsed = 0;
+ outPoint.time = targetTime;
+ float divisor = 0.0f;
+ float accelerationDivisor = 0.0f;
+ PanInfoHistoryIter historyBegin = history.begin();
+ PanInfoHistoryIter lastIt = history.end();
+ bool pointGenerated = false;
+ RelativeVectors newAcceleration;
+
+ // Iterate through point history to perform interpolation.
+ for( PanInfoHistoryIter it = historyBegin; it != history.end(); )
+ {
+ unsigned int gestureTime = it->time;
+
+ if( gestureTime < maxHistoryTime )
+ {
+ // Too far in the past, discard.
+ // Clean history as we go (if requested).
+ if( eraseUnused )
+ {
+ it = history.erase( it );
+ }
+ else
+ {
+ ++it;
+ continue;
+ }
+ }
+ else
+ {
+ if( gestureTime > discardInputBufferTime )
+ {
+ // Too far in the future, discard (and finish).
+ break;
+ }
+
+ float timeDelta( static_cast<float>( abs( int( targetTime - gestureTime ) ) ) );
+ // Handle low time deltas.
+ if( timeDelta < 1.0f )
+ {
+ timeDelta = 1.0f;
+ }
+
+ outPoint.local.position += it->local.position / timeDelta;
+ outPoint.screen.position += it->screen.position / timeDelta;
+ outPoint.local.velocity += it->local.velocity / timeDelta;
+ outPoint.screen.velocity += it->screen.velocity / timeDelta;
+ outPoint.local.displacement += it->local.displacement / timeDelta;
+ outPoint.screen.displacement += it->screen.displacement / timeDelta;
+
+ divisor += 1.0f / timeDelta;
+
+ // Acceleration requires a previous point.
+ if( lastIt != history.end() )
+ {
+ // Time delta of input.
+ float timeDifference( GetDivisibleTimeDifference( it->time, lastIt->time, 1.0f, OUTPUT_TIME_DIFFERENCE ) );
+
+ newAcceleration.local += ( ( it->local.velocity - lastIt->local.velocity ) / timeDifference ) / timeDelta;
+ newAcceleration.screen += ( ( it->screen.velocity - lastIt->screen.velocity ) / timeDifference ) / timeDelta;
+
+ accelerationDivisor += 1.0f / timeDelta;
+ }
+
+ tapsUsed++;
+ lastIt = it;
+ ++it;
+ }
+ }
+
+ // Divide results by their respective divisors.
+ if( tapsUsed > 0 )
+ {
+ if( divisor > 0.0f )
+ {
+ outPoint.local.position /= divisor;
+ outPoint.screen.position /= divisor;
+ outPoint.local.velocity /= divisor;
+ outPoint.screen.velocity /= divisor;
+ outPoint.local.displacement /= divisor;
+ outPoint.screen.displacement /= divisor;
+ }
+
+ if( tapsUsed > 1 )
+ {
+ if( accelerationDivisor > 0.0f )
+ {
+ newAcceleration.local /= accelerationDivisor;
+ newAcceleration.screen /= accelerationDivisor;
+ }
+
+ float accelerationSmoothing( ACCELERATION_SMOOTHING );
+ newAcceleration.local = ( acceleration.local * accelerationSmoothing ) + ( newAcceleration.local * ( 1.0f - accelerationSmoothing ) );
+ newAcceleration.screen = ( acceleration.screen * accelerationSmoothing ) + ( newAcceleration.screen * ( 1.0f - accelerationSmoothing ) );
+ }
+ else
+ {
+ // If we just started, last velocity was 0. So difference of zero to current velocity over time gives acceleration of the first point.
+ newAcceleration.local = outPoint.local.velocity / outputTimeGranularity;
+ newAcceleration.screen = outPoint.screen.velocity / outputTimeGranularity;
+ }
+ pointGenerated = true;
+ }
+
+ acceleration.local = newAcceleration.local;
+ acceleration.screen = newAcceleration.screen;
+ return pointGenerated;
+}
+
+// Returns |timeA - timeB| clamped to at least minimumDelta when mUseActualTimes is
+// set; otherwise returns overrideDifference (typically OUTPUT_TIME_DIFFERENCE) so a
+// fixed frame time can be used when actual times make results worse.
+float PanGesture::GetDivisibleTimeDifference( int timeA, int timeB, float minimumDelta, float overrideDifference )
+{
+ float timeDifference( overrideDifference );
+ if( mUseActualTimes )
+ {
+ timeDifference = static_cast<float>( abs( timeA - timeB ) );
+ if( timeDifference < minimumDelta )
+ {
+ timeDifference = minimumDelta;
+ }
+ }
+ return timeDifference;
+}
+
+// Clamps the per-axis change from lastAcceleration to currentAcceleration to
+// +/- changeLimit, writing the limited value back into currentAcceleration.
+void PanGesture::LimitAccelerationChange( RelativeVectors& currentAcceleration, RelativeVectors& lastAcceleration, float changeLimit )
+{
+ // We don't use the float parameter version of clamp here, as that will create the capping vectors twice in total.
+ Vector2 capMinimum( -changeLimit, -changeLimit );
+ Vector2 capMaximum( changeLimit, changeLimit );
+ Vector2 accelerationDeltaLocal( currentAcceleration.local - lastAcceleration.local );
+ Vector2 accelerationDeltaScreen( currentAcceleration.screen - lastAcceleration.screen );
+ accelerationDeltaLocal.Clamp( capMinimum, capMaximum );
+ accelerationDeltaScreen.Clamp( capMinimum, capMaximum );
+ currentAcceleration.local = lastAcceleration.local + accelerationDeltaLocal;
+ currentAcceleration.screen = lastAcceleration.screen + accelerationDeltaScreen;
+}
+
+// Predicts a point mCurrentPredictionAmount ms ahead of startPoint: either scalar
+// (position + velocity * t) when mScalarOnlyPredictionEnabled, or integrated
+// (position + v*t + 0.5*a*t^2) using accelerationToUse. Displacement and velocity of
+// the result are then derived relative to the previous prediction (or to startPoint
+// when noPreviousData). The result is written to predictedPoint and cached in
+// mLastPredictedPoint.
+void PanGesture::PredictionMode2( PanInfo& startPoint, RelativeVectors& accelerationToUse,
+ PanInfo& predictedPoint, unsigned int currentFrameTime, unsigned int previousFrameTime, bool noPreviousData )
+{
+ // Do the prediction (based on mode).
+ if( mScalarOnlyPredictionEnabled )
+ {
+ // We are doing scalar based prediction.
+ // This divisor is to help tuning by giving the scalar only result
+ // a similar prediction amount to the integrated result.
+ float scalarVelocityMultiplier = static_cast<float>( mCurrentPredictionAmount ) / 1.364f;
+ predictedPoint.local.position = startPoint.local.position + ( startPoint.local.velocity * scalarVelocityMultiplier );
+ predictedPoint.screen.position = startPoint.screen.position + ( startPoint.screen.velocity * scalarVelocityMultiplier );
+ }
+ else
+ {
+ // We are doing integration based prediction.
+ float predictionDelta( mCurrentPredictionAmount );
+
+ predictedPoint.local.position = startPoint.local.position + ( startPoint.local.velocity * predictionDelta ) +
+ ( accelerationToUse.local * ( predictionDelta * predictionDelta * 0.5f ) );
+ predictedPoint.screen.position = startPoint.screen.position + ( startPoint.screen.velocity * predictionDelta ) +
+ ( accelerationToUse.screen * ( predictionDelta * predictionDelta * 0.5f ) );
+ }
+
+ // Calculate remaining gesture data from the result.
+ float timeDifference( GetDivisibleTimeDifference( currentFrameTime, previousFrameTime, 1.0f, OUTPUT_TIME_DIFFERENCE ) );
+ if( noPreviousData )
+ {
+ predictedPoint.local.displacement = predictedPoint.local.position - startPoint.local.position;
+ predictedPoint.screen.displacement = predictedPoint.screen.position - startPoint.screen.position;
+ }
+ else
+ {
+ predictedPoint.local.displacement = predictedPoint.local.position - mLastPredictedPoint.local.position;
+ predictedPoint.screen.displacement = predictedPoint.screen.position - mLastPredictedPoint.screen.position;
+ }
+ predictedPoint.local.velocity = predictedPoint.local.displacement / timeDifference;
+ predictedPoint.screen.velocity = predictedPoint.screen.displacement / timeDifference;
+
+ // TODO: Experimental - not used at run time. Left in code for reference only.
+ if( TEST_TUNE_ENABLE_OVERSHOOT_PROTECTION )
+ {
+ // Overshoot protection
+ // If the predicted velocity changed sign vs. the previous prediction on an axis,
+ // hold that axis at the previous predicted position and clear the prediction history.
+ if( !noPreviousData )
+ {
+ if( ( mLastPredictedPoint.local.velocity.x > Math::MACHINE_EPSILON_0 && predictedPoint.local.velocity.x < Math::MACHINE_EPSILON_0 )
+ || ( mLastPredictedPoint.local.velocity.x < Math::MACHINE_EPSILON_0 && predictedPoint.local.velocity.x > Math::MACHINE_EPSILON_0 ) )
+ {
+ predictedPoint.local.position.x = mLastPredictedPoint.local.position.x;
+ predictedPoint.screen.position.x = mLastPredictedPoint.screen.position.x;
+ mPredictionHistory.clear();
+ }
+ if( ( mLastPredictedPoint.local.velocity.y > Math::MACHINE_EPSILON_0 && predictedPoint.local.velocity.y < Math::MACHINE_EPSILON_0 )
+ || ( mLastPredictedPoint.local.velocity.y < Math::MACHINE_EPSILON_0 && predictedPoint.local.velocity.y > Math::MACHINE_EPSILON_0 ) )
+ {
+ predictedPoint.local.position.y = mLastPredictedPoint.local.position.y;
+ predictedPoint.screen.position.y = mLastPredictedPoint.screen.position.y;
+ mPredictionHistory.clear();
+ }
+ }
+ }
+
+ predictedPoint.time = currentFrameTime;
+ mLastPredictedPoint = predictedPoint;
+}
+
+// TODO: This needs a better name! It is called this instead of prediction mode 2 because:
+// 1) It is the entire workflow, not just prediction.
+// 2) To make it less confusing as there is a function that does prediction alone called PredictionMode2.
+// Ultimately we need to combine the old and new code modularly so there is one code path that can optionally run different functions based on configuration.
+// At the moment, the differences between the inputs & outputs of these different functions prevent that, but this can be resolved.
+// Full workflow for the new (prediction mode 2) path: read this frame's input events,
+// rate-convert them, interpolate/limit acceleration, predict, smooth, and finally write
+// the resulting point to the gesture properties. Returns true if properties were updated.
+bool PanGesture::NewAlgorithm( unsigned int lastVSyncTime, unsigned int nextVSyncTime )
{
+ if( !mInGesture )
+ {
+ // clear current pan history
+ mPanHistory.clear();
+ mPredictionHistory.clear();
+ }
+
+ /*#########################################################################################
+ #### Read in all gestures received this frame first (holding a lock for a short time)
+ #########################################################################################*/
+
+ unsigned int eventsThisFrame = ReadFrameEvents();
+
+ /*#########################################################################################
+ #### Perform input rate-conversion on all gestures received this frame.
+ #### This also populates the pan history.
+ #########################################################################################*/
+
+ bool justStarted = false;
+ bool justFinished = false;
+ PanInfo rateConvertedGesture;
+ if( !InputRateConversion( rateConvertedGesture, eventsThisFrame, nextVSyncTime, lastVSyncTime, justStarted, justFinished ) )
+ {
+ // There's nothing we can do with the input, exit.
+ return false;
+ }
+
+ /*#########################################################################################
+ #### If we are in gesture, Get first interpolated point with: target time = current time
+ #########################################################################################*/
+
+ bool performUpdate = false;
+ // Seed the acceleration with last frame's initial acceleration in case no point is generated.
+ RelativeVectors currentAcceleration;
+ currentAcceleration.local = mLastInitialAcceleration.local;
+ currentAcceleration.screen = mLastInitialAcceleration.screen;
+
+ if( mInGesture || justStarted )
+ {
+ // Get first interpolated point.
+ // TODO: Erase time should be maximum of both interpolated point ranges in past.
+ PanInfo targetPoint;
+ // Frame delta (or the overridden "perfect" delta), clamped to be divisor-safe.
+ float outputTimeGranularity( GetDivisibleTimeDifference( nextVSyncTime, lastVSyncTime, 1.0f, OUTPUT_TIME_DIFFERENCE ) );
+ bool pointGenerated = InterpolatePoint( mPanHistory, nextVSyncTime, nextVSyncTime, mInterpolationTimeRange,
+ targetPoint, currentAcceleration, outputTimeGranularity, true );
+ if( pointGenerated )
+ {
+ mLastInitialAcceleration.local = currentAcceleration.local;
+ mLastInitialAcceleration.screen = currentAcceleration.screen;
+ performUpdate = true;
+ }
+ else
+ {
+ // Fall back to the rate-converted gesture and last frame's acceleration.
+ targetPoint = rateConvertedGesture;
+ currentAcceleration.local = mLastInitialAcceleration.local;
+ currentAcceleration.screen = mLastInitialAcceleration.screen;
+ // TODO: Potentially do something to substitute lack of generated point (and perform update).
+ }
+
+ /*#########################################################################################
+ #### Limit the change of acceleration of the first interpolated point since last time
+ #########################################################################################*/
+
+ if( !justStarted )
+ {
+ LimitAccelerationChange( currentAcceleration, mLastAcceleration, ACCELERATION_CAP );
+ }
+ mLastAcceleration.local = currentAcceleration.local;
+ mLastAcceleration.screen = currentAcceleration.screen;
+
+ /*#########################################################################################
+ #### Get second interpolated point, and blend the resultant velocity and acceleration (optional)
+ #########################################################################################*/
+
+ PanInfo outPoint;
+ RelativeVectors interpolatedAcceleration;
+ if( mTwoPointPredictionEnabled )
+ {
+ // Get second interpolated point with target time = current time - past interpolate time.
+ unsigned int pastInterpolateTime = nextVSyncTime - mTwoPointPastInterpolateTime;
+ // NOTE(review): these two declarations shadow the outPoint/interpolatedAcceleration
+ // declared just above this if-block, leaving the outer pair unused — confirm intent.
+ PanInfo outPoint;
+ RelativeVectors interpolatedAcceleration;
+ interpolatedAcceleration.local = mLastInterpolatedAcceleration.local;
+ interpolatedAcceleration.screen = mLastInterpolatedAcceleration.screen;
+ if( !InterpolatePoint( mPanHistory, nextVSyncTime, pastInterpolateTime, mTwoPointPastInterpolateTime,
+ outPoint, interpolatedAcceleration, outputTimeGranularity, false ) )
+ {
+ // No second point could be generated: reuse the first point (on start) or last frame's second point.
+ if( justStarted )
+ {
+ outPoint = targetPoint;
+ }
+ else
+ {
+ outPoint = mLastSecondInterpolatedPoint;
+ }
+ }
+ mLastInterpolatedAcceleration.local = interpolatedAcceleration.local;
+ mLastInterpolatedAcceleration.screen = interpolatedAcceleration.screen;
+ mLastSecondInterpolatedPoint = outPoint;
+
+ // Combine the first interpolated point and the second interpolated point.
+ // by mixing them with the configured amount. This is done for acceleration and velocity.
+ // It could be optionally done for position too, but this typically is worse as it means we have to predict further ahead.
+ float currentVelocityMultiplier( 1.0f - mTwoPointVelocityBias );
+ float lastVelocityMultiplier( mTwoPointVelocityBias );
+ targetPoint.local.velocity = ( outPoint.local.velocity * lastVelocityMultiplier ) + ( targetPoint.local.velocity * currentVelocityMultiplier );
+ targetPoint.screen.velocity = ( outPoint.screen.velocity * lastVelocityMultiplier ) + ( targetPoint.screen.velocity * currentVelocityMultiplier );
+ float currentAccelerationMultiplier( 1.0f - mTwoPointAccelerationBias );
+ float lastAccelerationMultiplier( mTwoPointAccelerationBias );
+ currentAcceleration.local = ( interpolatedAcceleration.local * lastAccelerationMultiplier ) + ( currentAcceleration.local * currentAccelerationMultiplier );
+ currentAcceleration.screen = ( interpolatedAcceleration.screen * lastAccelerationMultiplier ) + ( currentAcceleration.screen * currentAccelerationMultiplier );
+ }
+
+ /*#########################################################################################
+ #### Perform prediction
+ #########################################################################################*/
+
+ PanInfo predictedPoint;
+ PredictionMode2( targetPoint, currentAcceleration, predictedPoint, nextVSyncTime, lastVSyncTime, justStarted );
+ targetPoint = predictedPoint;
+
+ /*#########################################################################################
+ #### Smoothing
+ #########################################################################################*/
+
+ // If we are using multi-tap smoothing, keep a history of predicted results.
+ if( mSmoothingMode == SMOOTHING_MULTI_TAP )
+ {
+ mPredictionHistory.push_back( targetPoint );
+ }
+
+ if( !justStarted )
+ {
+ float outputTimeGranularity( GetDivisibleTimeDifference( nextVSyncTime, lastVSyncTime, 1.0f, OUTPUT_TIME_DIFFERENCE ) );
+ if( mSmoothingMode == SMOOTHING_MULTI_TAP )
+ {
+ // Perform Multi-tap Smoothing.
+ RelativeVectors blank;
+ InterpolatePoint( mPredictionHistory, nextVSyncTime, nextVSyncTime, mMultiTapSmoothingRange,
+ targetPoint, blank, outputTimeGranularity, true );
+ }
+ else
+ {
+ // Perform Single-tap Smoothing.
+ if( !mSmoothingAmountOverridden )
+ {
+ // If the smoothing amount has not been modified, default to the correct amount for this algorithm.
+ // NOTE(review): presumably index 1 is the prediction-mode-2 default — confirm against
+ // the DEFAULT_SMOOTHING_AMOUNT table definition (not visible in this chunk).
+ mSmoothingAmount = DEFAULT_SMOOTHING_AMOUNT[1];
+ }
+ BlendPoints( targetPoint, mLastGesture, mSmoothingAmount );
+ }
+
+ /*#########################################################################################
+ #### Finalize other point data (from position)
+ #########################################################################################*/
+
+ targetPoint.local.displacement = targetPoint.local.position - mLastGesture.local.position;
+ targetPoint.local.velocity = targetPoint.local.displacement / outputTimeGranularity;
+ targetPoint.screen.displacement = targetPoint.screen.position - mLastGesture.screen.position;
+ targetPoint.screen.velocity = targetPoint.screen.displacement / outputTimeGranularity;
+ }
+
+ /*#########################################################################################
+ #### Send out the new point, by setting the properties
+ #### (Constraints will automatically react to this)
+ #########################################################################################*/
+
+ if( performUpdate )
+ {
+ // Panning is true only while in-gesture and not just finished.
+ // NOTE(review): bitwise '&' on bools works, but '&&' would state the intent — confirm.
+ mPanning.Set( mInGesture & !justFinished );
+ mScreenPosition.Set( targetPoint.screen.position );
+ mScreenDisplacement.Set( targetPoint.screen.displacement );
+ mScreenVelocity.Set( targetPoint.screen.velocity );
+ mLocalPosition.Set( targetPoint.local.position );
+ mLocalDisplacement.Set( targetPoint.local.displacement );
+ mLocalVelocity.Set( targetPoint.local.velocity );
+
+ mLastGesture = targetPoint;
+
+ if( mProfiling )
+ {
+ mProfiling->mAveragedData.push_back( PanGestureProfiling::Position( targetPoint.time, targetPoint.screen.position,
+ targetPoint.screen.displacement, targetPoint.screen.velocity, targetPoint.state ) );
+ }
+ }
+ }
+
+ // Clear the in-gesture flag when the gesture just finished (bitwise flag-clear on a bool).
+ mInGesture &= ~justFinished;
+
+ return performUpdate;
}
*/
// INTERNAL INCLUDES
+#include <dali/devel-api/threading/mutex.h>
#include <dali/public-api/common/vector-wrapper.h>
#include <dali/public-api/events/pan-gesture.h>
#include <dali/internal/update/common/property-owner.h>
enum PredictionMode
{
PREDICTION_NONE = 0,
- PREDICTION_1
+ PREDICTION_1, ///< Prediction algorithm 1 (see PredictionMode1()).
+ PREDICTION_2, ///< Prediction algorithm 2 (see PredictionMode2() / NewAlgorithm()).
};
enum SmoothingMode
{
- SMOOTHING_NONE, // no smoothing
- SMOOTHING_LAST_VALUE, // smooth between last value and latest value
+ SMOOTHING_NONE, // No smoothing.
+ SMOOTHING_LAST_VALUE, // Smooth between last value and latest value.
+ SMOOTHING_MULTI_TAP, // Uses multitap smoothing, only available with Prediction mode 2.
};
static const PredictionMode DEFAULT_PREDICTION_MODE;
typedef PanInfoHistory::const_iterator PanInfoHistoryConstIter;
private:
- static const unsigned int PAN_GESTURE_HISTORY = 10u;
+ static const unsigned int PAN_GESTURE_HISTORY = 30u;
public:
/**
* Uses elapsed time and time stamps
*/
- void PredictiveAlgorithm1(int eventsThisFrame, PanInfo& gestureOut, PanInfoHistory& panHistory, unsigned int lastVSyncTime, unsigned int nextVSyncTime);
+ void PredictionMode1(int eventsThisFrame, PanInfo& gestureOut, PanInfoHistory& panHistory, unsigned int lastVSyncTime, unsigned int nextVSyncTime);
/**
- * Uses last two gestures
+ * Blends two points together.
+ * The blend value ranges are:
+ * 0.0f = use 100% of current value.
+ * 1.0f = use half-way average of current and last value.
*
- * @param[in] justStarted Whether the pan has just started.
- * @param[out] gestureOut Output gesture using average values from last two gestures
- * @param[in] lastVSyncTime The time to set on gestureOut.
+ * @param[in,out] gesture Pass in current gesture, outputs result of blend.
+ * @param[in] lastGesture Pass in gesture to blend between.
*/
- void SmoothingAlgorithm1(bool justStarted, PanInfo& gestureOut, unsigned int lastVSyncTime);
-
- /**
- * Future smoothing method, implementation not complete
- */
- void SmoothingAlgorithm2(bool justStarted, PanInfo& gestureOut, unsigned int lastVSyncTime);
+ void BlendPoints( PanInfo& gesture, PanInfo& lastGesture, float blendValue );
/**
* Called by the update manager so that we can update the value of our properties.
*/
void SetSmoothingAmount(float amount);
+ /*
+ * @brief Sets whether to use actual times of the real gesture and frames or not.
+ *
+ * @param[in] value True = use actual times, False = use perfect values
+ */
+ void SetUseActualTimes( bool value );
+
+ /**
+ * @brief Sets the interpolation time range (ms) of past points to use (with weights) when interpolating.
+ *
+ * @param[in] value Time range in ms
+ */
+ void SetInterpolationTimeRange( int value );
+
+ /**
+ * @brief Sets whether to use scalar only prediction, which when enabled, ignores acceleration.
+ *
+ * @param[in] value True = use scalar prediction only
+ */
+ void SetScalarOnlyPredictionEnabled( bool value );
+
+ /**
+ * @brief Sets whether to use two point prediction. This combines two interpolated points to get more steady acceleration and velocity values.
+ *
+ * @param[in] value True = use two point prediction
+ */
+ void SetTwoPointPredictionEnabled( bool value );
+
+ /**
+ * @brief Sets the time in the past to interpolate the second point when using two point interpolation.
+ *
+ * @param[in] value Time in past in ms
+ */
+ void SetTwoPointInterpolatePastTime( int value );
+
+ /**
+ * @brief Sets the two point velocity bias. This is the ratio of first and second points to use for velocity.
+ *
+ * @param[in] value 0.0f = 100% first point. 1.0f = 100% of second point.
+ */
+ void SetTwoPointVelocityBias( float value );
+
+ /**
+ * @brief Sets the two point acceleration bias. This is the ratio of first and second points to use for acceleration.
+ *
+ * @param[in] value 0.0f = 100% first point. 1.0f = 100% of second point.
+ */
+ void SetTwoPointAccelerationBias( float value );
+
+ /**
+ * @brief Sets the range of time (ms) of points in the history to perform multitap smoothing with (if enabled).
+ *
+ * @param[in] value Time in past in ms
+ */
+ void SetMultitapSmoothingRange( int value );
+
/**
* Called to provide pan-gesture profiling information.
*/
// Undefined
PanGesture(const PanGesture&);
+private:
+
+ // Struct to keep pairs of local and screen data together.
+ // TODO: This can encapsulate some functionality also.
+ typedef struct
+ {
+ Vector2 local;
+ Vector2 screen;
+ } RelativeVectors;
+
+ /**
+ * Houses new code to process input events and generate an output point.
+ *
+ * @param[in] lastVSyncTime The time of the last render (in milliseconds)
+ * @param[in] nextVSyncTime The estimated time of the next render (in milliseconds)
+ */
+ bool NewAlgorithm( unsigned int lastVSyncTime, unsigned int nextVSyncTime );
+
+ /**
+ * Gets the (absolute) time difference between two times.
+ * This is limited by minimumDelta, so it can be safe to use as a divisor.
+ * This function is wrapped so that the behaviour can be overridden to return a "perfect" time difference (overrideDifference).
+ *
+ * @param[in] timeA The first time to calculate from
+ * @param[in] timeB The second time to calculate from
+ * @param[in] minimumDelta The smallest amount the difference can become
+ * @param[in] overrideDifference The time difference to return if using perfect times
+ */
+ inline float GetDivisibleTimeDifference( int timeA, int timeB, float minimumDelta, float overrideDifference );
+
+ /**
+ * This limits the change currentAcceleration can have over lastAcceleration by the specified changeLimit value.
+ *
+ * @param[in] currentAcceleration The acceleration to modify
+ * @param[in] lastAcceleration The acceleration to limit against
+ * @param[in] changeLimit The maximum change (in either direction)
+ */
+ void LimitAccelerationChange( RelativeVectors& currentAcceleration, RelativeVectors& lastAcceleration, float changeLimit );
+
+ /**
+ * Reads all events received this frame into a linear buffer.
+ * A lock is held while this is done.
+ */
+ unsigned int ReadFrameEvents();
+
+ /**
+ * Converts between input rate and internal rate (typically 60Hz internally).
+ * Also writes to the pan history container.
+ * TODO: Does not need to return the gesture if it is in the history also, but currently it's used.
+ * (if rate conversion does not generate a point there are points still in history, but this can be done with a bool property).
+ *
+ * @param[out] rateConvertedGesture Result gesture for this frame is written here.
+ * @param[in] eventsThisFrame Number of events to convert
+ * @param[in] currentFrameTime Time of the frame we will render to
+ * @param[in] lastFrameTime Time of the last rendered frame
+ * @param[out] justStarted Set to true if we are now starting a new gesture
+ * @param[out] justFinished Set to true if we are now finishing a gesture
+ */
+ bool InputRateConversion( PanInfo& rateConvertedGesture, unsigned int eventsThisFrame,
+ unsigned int currentFrameTime, unsigned int lastFrameTime, bool& justStarted, bool& justFinished );
+
+ /**
+ * Generates an interpolated point at the specified point in time.
+ *
+ * @param[in] history of points to use
+ * @param[in] currentTime Time of the frame we will render to
+ * @param[in] targetTime Time of the point to generate
+ * @param[in] range Range of time (each side of target time) to use points from
+ * @param[out] outPoint Generated point
+ * @param[out] acceleration Generated acceleration
+ * @param[in] outputTimeGranularity Time difference between output point (typically 60Hz)
+ * @param[in] eraseUnused Set to true to clean up any history not used by the function
+ */
+ bool InterpolatePoint( PanInfoHistory& history, unsigned int currentTime, unsigned int targetTime, unsigned int range,
+ PanInfo& outPoint, RelativeVectors& acceleration, int outputTimeGranularity, bool eraseUnused );
+
+ /**
+ * Predicts a point in the future, based on the supplied point and acceleration.
+ * Other user configuration settings are considered.
+ *
+ * @param[in] startPoint Starting point to use. Position and velocity are taken from here.
+ * @param[in] accelerationToUse The acceleration to use.
+ * @param[out] predictedPoint Generated predicted point
+ * @param[in] currentFrameTime Time of the frame we will render to
+ * @param[in] previousFrameTime Time of the last rendered frame
+ * @param[in] noPreviousData Set to true if we are just starting a gesture
+ */
+ void PredictionMode2( PanInfo& startPoint, RelativeVectors& accelerationToUse,
+ PanInfo& predictedPoint, unsigned int currentFrameTime, unsigned int previousFrameTime, bool noPreviousData );
+
+private:
+
// Undefined
PanGesture& operator=(const PanGesture&);
PanInfoHistory mPanHistory;
PanInfoHistory mPredictionHistory;
PanInfo mGestures[PAN_GESTURE_HISTORY]; ///< Circular buffer storing the 4 most recent gestures.
+ PanInfo mReadGestures[PAN_GESTURE_HISTORY]; ///< Linear buffer storing the most recent gestures (to reduce read lock time).
PanInfo mLastGesture; ///< The last gesture. (last update frame).
PanInfo mTargetGesture; ///< The most recent input gesture, if the current used gesture does not match.
PanInfo mLastUnmodifiedGesture; ///< The last gesture before any processing was done on it.
- unsigned int mWritePosition; ///< The next PanInfo buffer to write to. (starts at 0).
+ PanInfo mLastSecondInterpolatedPoint; ///< Stores the last second interpolated point we generated.
+ PanInfo mLastFrameReadGesture; ///< Stores the last gesture read.
+ PanInfo mLastPredictedPoint; ///< Stores the last predicted point we generated.
+ RelativeVectors mLastAcceleration; ///< Stores the acceleration value from the acceleration limiting last frame.
+ RelativeVectors mLastInterpolatedAcceleration; ///< Stores the second interpolated point acceleration value from the last frame.
+ RelativeVectors mLastInitialAcceleration; ///< Stores the initial acceleration value from the last frame.
+
+ volatile unsigned int mWritePosition; ///< The next PanInfo buffer to write to. (starts at 0).
unsigned int mReadPosition; ///< The next PanInfo buffer to read. (starts at 0).
bool mNotAtTarget; ///< Keeps track of if the last gesture used was the most recent received.
bool mInGesture; ///< True if the gesture is currently being handled i.e. between Started <-> Finished/Cancelled.
+ bool mPredictionAmountOverridden;
+ bool mSmoothingAmountOverridden;
+
+ PanGestureProfiling* mProfiling; ///< NULL unless pan-gesture profiling information is required.
+ Dali::Mutex mMutex; ///< Mutex to lock access.
+
+ // Environment variables:
PredictionMode mPredictionMode; ///< The pan gesture prediction mode
unsigned int mPredictionAmount; ///< how far into future to predict in milliseconds
unsigned int mPredictionAmountAdjustment; ///< the prediction amount to adjust in milliseconds when pan velocity changes
SmoothingMode mSmoothingMode; ///< The pan gesture prediction mode
float mSmoothingAmount; ///< How much smoothing to apply [0.0f,1.0f]
- PanGestureProfiling* mProfiling; ///< NULL unless pan-gesture profiling information is required.
+ bool mUseActualTimes; ///< Disable to optionally override actual times if they make results worse.
+ int mInterpolationTimeRange; ///< Time into past history (ms) to use points to interpolate the first point.
+ bool mScalarOnlyPredictionEnabled; ///< If enabled, prediction is done using velocity alone (no integration or acceleration).
+ bool mTwoPointPredictionEnabled; ///< If enabled, a second interpolated point is predicted and combined with the first to get more stable values.
+ int mTwoPointPastInterpolateTime; ///< The target time in the past to generate the second interpolated point.
+ float mTwoPointVelocityBias; ///< The ratio of first and second interpolated points to use for velocity. 0.0f = 100% of first point. 1.0f = 100% of second point.
+ float mTwoPointAccelerationBias; ///< The ratio of first and second interpolated points to use for acceleration. 0.0f = 100% of first point. 1.0f = 100% of second point.
+ int mMultiTapSmoothingRange; ///< The range in time (ms) of points in the history to smooth the final output against.
};
} // namespace SceneGraph