Skip to content

Commit

Permalink
Eager Gesture Recognition on Aura
Browse files Browse the repository at this point in the history
This enables eager gesture recognition on Aura for the unified gesture
detector.

BUG=332418
TEST=GestureRecognizer/GestureRecognizerTest

Review URL: https://codereview.chromium.org/393953012

Cr-Commit-Position: refs/heads/master@{#288236}
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@288236 0039d316-1c4b-4281-b951-d872f2087c98
  • Loading branch information
tdresser@chromium.org committed Aug 8, 2014
1 parent d58d5b9 commit 709c57e
Show file tree
Hide file tree
Showing 21 changed files with 392 additions and 102 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ IN_PROC_BROWSER_TEST_F(TouchExplorationTest, NoRewritingEventsWhenOff) {

base::TimeDelta initial_time = Now();
ui::TouchEvent initial_press(
ui::ET_TOUCH_PRESSED, gfx::Point(100, 200), 1, initial_time);
ui::ET_TOUCH_PRESSED, gfx::Point(99, 200), 1, initial_time);
generator.Dispatch(&initial_press);

// Since the touch exploration controller doesn't know if the user is
Expand All @@ -111,7 +111,7 @@ IN_PROC_BROWSER_TEST_F(TouchExplorationTest, NoRewritingEventsWhenOff) {

initial_time = Now();
ui::TouchEvent second_initial_press(
ui::ET_TOUCH_PRESSED, gfx::Point(500, 600), 2, initial_time);
ui::ET_TOUCH_PRESSED, gfx::Point(499, 600), 2, initial_time);
generator.Dispatch(&second_initial_press);
ui::TouchEvent second_touch_time_advance(
ui::ET_TOUCH_MOVED,
Expand Down
6 changes: 5 additions & 1 deletion content/browser/frame_host/render_widget_host_view_guest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -109,8 +109,12 @@ void RenderWidgetHostViewGuest::ProcessAckedTouchEvent(
INPUT_EVENT_ACK_STATE_CONSUMED) ? ui::ER_HANDLED : ui::ER_UNHANDLED;
for (ScopedVector<ui::TouchEvent>::iterator iter = events.begin(),
end = events.end(); iter != end; ++iter) {
if (!ui::GestureRecognizer::Get()->ProcessTouchEventPreDispatch(*(*iter),
this)) {
continue;
}
scoped_ptr<ui::GestureRecognizer::Gestures> gestures;
gestures.reset(gesture_recognizer_->ProcessTouchEventForGesture(
gestures.reset(ui::GestureRecognizer::Get()->ProcessTouchEventPostDispatch(
*(*iter), result, this));
ProcessGestures(gestures.get());
}
Expand Down
1 change: 0 additions & 1 deletion content/browser/renderer_host/input/input_router_impl.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ namespace content {
class InputAckHandler;
class InputRouterClient;
class OverscrollController;
class RenderWidgetHostImpl;
struct DidOverscrollParams;

// A default implementation for browser input event routing.
Expand Down
7 changes: 0 additions & 7 deletions content/browser/renderer_host/ui_events_helper.cc
Original file line number Diff line number Diff line change
Expand Up @@ -310,13 +310,6 @@ blink::WebTouchPoint* UpdateWebTouchEventFromUIEvent(

// Update the location and state of the point.
point->state = TouchPointStateFromEvent(event);
if (point->state == blink::WebTouchPoint::StateMoved) {
// It is possible for badly written touch drivers to emit Move events even
// when the touch location hasn't changed. In such cases, consume the event
// and pretend nothing happened.
if (point->position.x == event.x() && point->position.y == event.y())
return NULL;
}
point->position.x = event.x();
point->position.y = event.y();

Expand Down
18 changes: 5 additions & 13 deletions content/public/test/browser_test_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -294,21 +294,13 @@ void SimulateMouseEvent(WebContents* web_contents,
}

void SimulateTapAt(WebContents* web_contents, const gfx::Point& point) {
const double kTapDurationSeconds =
0.5 * (ui::GestureConfiguration::
min_touch_down_duration_in_seconds_for_click() +
ui::GestureConfiguration::
max_touch_down_duration_in_seconds_for_click());
SyntheticWebTouchEvent touch;
// Set the timestamp to the base::TimeDelta representing the current time.
touch.SetTimestamp(base::TimeTicks::Now() - base::TimeTicks());
touch.PressPoint(point.x(), point.y());
blink::WebGestureEvent tap;
tap.type = blink::WebGestureEvent::GestureTap;
tap.x = point.x();
tap.y = point.y();
RenderWidgetHostImpl* widget_host =
RenderWidgetHostImpl::From(web_contents->GetRenderViewHost());
widget_host->ForwardTouchEventWithLatencyInfo(touch, ui::LatencyInfo());
touch.timeStampSeconds += kTapDurationSeconds;
touch.ReleasePoint(0);
widget_host->ForwardTouchEventWithLatencyInfo(touch, ui::LatencyInfo());
widget_host->ForwardGestureEvent(tap);
}

void SimulateKeyPress(WebContents* web_contents,
Expand Down
47 changes: 47 additions & 0 deletions ui/aura/gestures/gesture_recognizer_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -3927,6 +3927,7 @@ TEST_P(GestureRecognizerTest, GestureEventConsumedTouchMoveScrollTest) {
gfx::Rect bounds(0, 0, 1000, 1000);
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
delegate.get(), -1234, bounds, root_window()));
delegate->set_window(window.get());

ui::TouchEvent press(ui::ET_TOUCH_PRESSED, gfx::Point(0, 0),
kTouchId, tes.Now());
Expand Down Expand Up @@ -4029,6 +4030,7 @@ TEST_P(GestureRecognizerTest, GestureEventConsumedTouchMoveTapTest) {
gfx::Rect bounds(0, 0, 1000, 1000);
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
delegate.get(), -1234, bounds, root_window()));
delegate->set_window(window.get());

ui::TouchEvent press(ui::ET_TOUCH_PRESSED, gfx::Point(0, 0),
kTouchId, tes.Now());
Expand Down Expand Up @@ -4059,6 +4061,7 @@ TEST_P(GestureRecognizerTest, GestureEventConsumedTouchMoveLongPressTest) {
gfx::Rect bounds(100, 200, kWindowWidth, kWindowHeight);
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
delegate.get(), -1234, bounds, root_window()));
delegate->set_window(window.get());

delegate->Reset();

Expand Down Expand Up @@ -4155,6 +4158,7 @@ TEST_P(GestureRecognizerTest, ScrollAlternatelyConsumedTest) {
gfx::Rect bounds(0, 0, kWindowWidth, kWindowHeight);
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
delegate.get(), -1234, bounds, root_window()));
delegate->set_window(window.get());

delegate->Reset();

Expand Down Expand Up @@ -4512,6 +4516,49 @@ TEST_P(GestureRecognizerTest, GestureEventSmallPinchEnabled) {
ui::ET_GESTURE_PINCH_UPDATE);
}

// Tests that delaying the ack of a touch release doesn't trigger a long press
// gesture. With eager gesture recognition enabled (this commit), gesture
// detection happens at dispatch time rather than on ack, so a slow ack of the
// release must not be mistaken for the finger still being down.
TEST_P(GestureRecognizerTest, EagerGestureDetection) {
// Eager detection only exists in the unified gesture recognizer; skip when
// the test is parameterized to run the legacy (aura) recognizer.
if (!UsingUnifiedGR())
return;

// QueueTouchEventDelegate queues touch acks so the test can deliver them
// manually via ReceivedAck() below.
scoped_ptr<QueueTouchEventDelegate> delegate(
new QueueTouchEventDelegate(host()->dispatcher()));
TimedEvents tes;
const int kTouchId = 2;
gfx::Rect bounds(100, 200, 100, 100);
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
delegate.get(), -1234, bounds, root_window()));
delegate->set_window(window.get());

delegate->Reset();
// Press and release at the same point, only 50 ms apart — well inside the
// long-press timeout, so no long press should ever be produced.
ui::TouchEvent press(ui::ET_TOUCH_PRESSED, gfx::Point(101, 201),
kTouchId, tes.Now());
DispatchEventUsingWindowDispatcher(&press);
ui::TouchEvent release(ui::ET_TOUCH_RELEASED, gfx::Point(101, 201),
kTouchId, tes.LeapForward(50));
DispatchEventUsingWindowDispatcher(&release);

delegate->Reset();
// Ack the touch press.
delegate->ReceivedAck();
EXPECT_TRUE(delegate->tap_down());

delegate->Reset();
// Wait until the long press event would fire (if we weren't eager). The
// release has already been dispatched (just not acked), so an eager
// recognizer must have cancelled its long-press timer by now.
base::MessageLoop::current()->PostDelayedTask(
FROM_HERE,
base::MessageLoop::QuitClosure(),
base::TimeDelta::FromSecondsD(
ui::GestureConfiguration::long_press_time_in_seconds() * 1.1));
base::MessageLoop::current()->Run();

// Ack the touch release. Despite the delayed ack spanning the long-press
// window, only a tap — never a long press — should have been generated.
delegate->ReceivedAck();
EXPECT_TRUE(delegate->tap());
EXPECT_FALSE(delegate->long_press());
}

INSTANTIATE_TEST_CASE_P(GestureRecognizer,
GestureRecognizerTest,
::testing::Bool());
Expand Down
42 changes: 33 additions & 9 deletions ui/aura/window_event_dispatcher.cc
Original file line number Diff line number Diff line change
Expand Up @@ -160,9 +160,12 @@ DispatchDetails WindowEventDispatcher::DispatchMouseExitAtPoint(
void WindowEventDispatcher::ProcessedTouchEvent(ui::TouchEvent* event,
Window* window,
ui::EventResult result) {
scoped_ptr<ui::GestureRecognizer::Gestures> gestures;
gestures.reset(ui::GestureRecognizer::Get()->
ProcessTouchEventForGesture(*event, result, window));
ui::TouchEvent orig_event(*event, window, this->window());
// Once we've fully migrated to the eager gesture detector, we won't need to
// pass an event here.
scoped_ptr<ui::GestureRecognizer::Gestures> gestures(
ui::GestureRecognizer::Get()->ProcessTouchEventOnAsyncAck(
orig_event, result, window));
DispatchDetails details = ProcessGestures(gestures.get());
if (details.dispatcher_destroyed)
return;
Expand Down Expand Up @@ -489,13 +492,20 @@ ui::EventDispatchDetails WindowEventDispatcher::PostDispatchEvent(
// being dispatched.
if (dispatching_held_event_ || !held_move_event_ ||
!held_move_event_->IsTouchEvent()) {
ui::TouchEvent orig_event(static_cast<const ui::TouchEvent&>(event),
static_cast<Window*>(event.target()), window());
// Get the list of GestureEvents from GestureRecognizer.
// If the event is being handled asynchronously, ignore it.
if(event.result() & ui::ER_CONSUMED)
return details;
scoped_ptr<ui::GestureRecognizer::Gestures> gestures;
gestures.reset(ui::GestureRecognizer::Get()->
ProcessTouchEventForGesture(orig_event, event.result(),
static_cast<Window*>(target)));

// Once we've fully migrated to the eager gesture detector, we won't
// need to pass an event here.
ui::TouchEvent orig_event(static_cast<const ui::TouchEvent&>(event),
static_cast<Window*>(event.target()),
window());
gestures.reset(
ui::GestureRecognizer::Get()->ProcessTouchEventPostDispatch(
orig_event, event.result(), static_cast<Window*>(target)));

return ProcessGestures(gestures.get());
}
}
Expand Down Expand Up @@ -870,6 +880,20 @@ void WindowEventDispatcher::PreDispatchTouchEvent(Window* target,
NOTREACHED();
break;
}

if (dispatching_held_event_ || !held_move_event_ ||
!held_move_event_->IsTouchEvent()) {
ui::TouchEvent orig_event(*event, target, window());

// If the touch event is invalid in some way, the gesture recognizer will
// reject it. In this case, stop the touch from reaching the next event
// phase.
if (!ui::GestureRecognizer::Get()->ProcessTouchEventPreDispatch(orig_event,
target)) {
event->SetHandled();
}
}

PreDispatchLocatedEvent(target, event);
}

Expand Down
17 changes: 9 additions & 8 deletions ui/aura/window_event_dispatcher_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -737,12 +737,11 @@ TEST_F(WindowEventDispatcherTest, TouchMovesHeld) {
scoped_ptr<aura::Window> window(CreateTestWindowWithDelegate(
&delegate, 1, gfx::Rect(50, 50, 100, 100), root_window()));

const gfx::Point touch_location(60, 60);
// Starting the touch and throwing out the first few events, since the system
// is going to generate synthetic mouse events that are not relevant to the
// test.
ui::TouchEvent touch_pressed_event(
ui::ET_TOUCH_PRESSED, touch_location, 0, ui::EventTimeForNow());
ui::ET_TOUCH_PRESSED, gfx::Point(10, 10), 0, ui::EventTimeForNow());
DispatchEventUsingWindowDispatcher(&touch_pressed_event);
recorder.WaitUntilReceivedEvent(ui::ET_GESTURE_SHOW_PRESS);
recorder.Reset();
Expand All @@ -751,9 +750,11 @@ TEST_F(WindowEventDispatcherTest, TouchMovesHeld) {

// Check that we don't immediately dispatch the TOUCH_MOVED event.
ui::TouchEvent touch_moved_event(
ui::ET_TOUCH_MOVED, touch_location, 0, ui::EventTimeForNow());
ui::TouchEvent touch_moved_event2 = touch_moved_event;
ui::TouchEvent touch_moved_event3 = touch_moved_event;
ui::ET_TOUCH_MOVED, gfx::Point(10, 10), 0, ui::EventTimeForNow());
ui::TouchEvent touch_moved_event2(
ui::ET_TOUCH_MOVED, gfx::Point(11, 10), 0, ui::EventTimeForNow());
ui::TouchEvent touch_moved_event3(
ui::ET_TOUCH_MOVED, gfx::Point(12, 10), 0, ui::EventTimeForNow());

DispatchEventUsingWindowDispatcher(&touch_moved_event);
EXPECT_TRUE(recorder.events().empty());
Expand All @@ -771,7 +772,7 @@ TEST_F(WindowEventDispatcherTest, TouchMovesHeld) {
// If another touch event occurs then the held touch should be dispatched
// immediately before it.
ui::TouchEvent touch_released_event(
ui::ET_TOUCH_RELEASED, touch_location, 0, ui::EventTimeForNow());
ui::ET_TOUCH_RELEASED, gfx::Point(10, 10), 0, ui::EventTimeForNow());
recorder.Reset();
host()->dispatcher()->HoldPointerMoves();
DispatchEventUsingWindowDispatcher(&touch_moved_event3);
Expand Down Expand Up @@ -1720,8 +1721,8 @@ TEST_F(WindowEventDispatcherTest, WindowHideCancelsActiveGestures) {
"TOUCH_CANCELLED GESTURE_PINCH_END GESTURE_END TOUCH_CANCELLED "
"GESTURE_SCROLL_END GESTURE_END";
expected_ugr =
"TOUCH_CANCELLED GESTURE_SCROLL_END GESTURE_END GESTURE_END "
"TOUCH_CANCELLED";
"TOUCH_CANCELLED GESTURE_SCROLL_END GESTURE_END TOUCH_CANCELLED "
"GESTURE_END";

events_string = EventTypesToString(recorder.GetAndResetEvents());
EXPECT_TRUE((expected == events_string) || (expected_ugr == events_string));
Expand Down
8 changes: 4 additions & 4 deletions ui/aura/window_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -828,9 +828,9 @@ TEST_F(WindowTest, TouchCaptureCancelsOtherTouches) {
&delegate2, 0, gfx::Rect(50, 50, 50, 50), root_window()));

// Press on w1.
ui::TouchEvent press(
ui::TouchEvent press1(
ui::ET_TOUCH_PRESSED, gfx::Point(10, 10), 0, getTime());
DispatchEventUsingWindowDispatcher(&press);
DispatchEventUsingWindowDispatcher(&press1);
// We will get both GESTURE_BEGIN and GESTURE_TAP_DOWN.
EXPECT_EQ(2, delegate1.gesture_event_count());
delegate1.ResetCounts();
Expand All @@ -842,13 +842,13 @@ TEST_F(WindowTest, TouchCaptureCancelsOtherTouches) {
delegate1.ResetCounts();
delegate2.ResetCounts();

// Events now go to w2.
// Events are ignored by w2, as it's receiving a partial touch stream.
ui::TouchEvent move(ui::ET_TOUCH_MOVED, gfx::Point(10, 20), 0, getTime());
DispatchEventUsingWindowDispatcher(&move);
EXPECT_EQ(0, delegate1.gesture_event_count());
EXPECT_EQ(0, delegate1.touch_event_count());
EXPECT_EQ(0, delegate2.gesture_event_count());
EXPECT_EQ(1, delegate2.touch_event_count());
EXPECT_EQ(0, delegate2.touch_event_count());

ui::TouchEvent release(
ui::ET_TOUCH_RELEASED, gfx::Point(10, 20), 0, getTime());
Expand Down
6 changes: 6 additions & 0 deletions ui/events/BUILD.gn
Original file line number Diff line number Diff line change
Expand Up @@ -291,6 +291,12 @@ test("events_unittests") {
]
}

if (use_aura) {
sources += [
"gestures/gesture_provider_aura_unittest.cc",
]
}

deps = [
":events",
":events_base",
Expand Down
2 changes: 2 additions & 0 deletions ui/events/events.gyp
Original file line number Diff line number Diff line change
Expand Up @@ -319,6 +319,7 @@
'event_rewriter_unittest.cc',
'event_unittest.cc',
'gestures/fling_curve_unittest.cc',
'gestures/gesture_provider_aura_unittest.cc',
'gestures/motion_event_aura_unittest.cc',
'gestures/velocity_calculator_unittest.cc',
'gesture_detection/bitset_32_unittest.cc',
Expand Down Expand Up @@ -346,6 +347,7 @@
}],
['use_aura==0', {
'sources!': [
'gestures/gesture_provider_aura_unittest.cc',
'gestures/motion_event_aura_unittest.cc',
'gestures/velocity_calculator_unittest.cc',
],
Expand Down
5 changes: 4 additions & 1 deletion ui/events/gesture_detection/gesture_config_helper_aura.cc
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,10 @@ ScaleGestureDetector::Config DefaultScaleGestureDetectorConfig() {

GestureProvider::Config DefaultGestureProviderConfig() {
GestureProvider::Config config;
config.display = gfx::Screen::GetNativeScreen()->GetPrimaryDisplay();
gfx::Screen* screen = gfx::Screen::GetScreenByType(gfx::SCREEN_TYPE_NATIVE);
// |screen| is sometimes NULL during tests.
if (screen)
config.display = screen->GetPrimaryDisplay();
config.gesture_detector_config = DefaultGestureDetectorConfig();
config.scale_gesture_detector_config = DefaultScaleGestureDetectorConfig();
config.gesture_begin_end_types_enabled = true;
Expand Down
26 changes: 9 additions & 17 deletions ui/events/gesture_detection/gesture_provider.cc
Original file line number Diff line number Diff line change
Expand Up @@ -639,7 +639,11 @@ void GestureProvider::InitGestureDetectors(const Config& config) {
}

bool GestureProvider::CanHandle(const MotionEvent& event) const {
return event.GetAction() == MotionEvent::ACTION_DOWN || current_down_event_;
// Aura requires one cancel event per touch point, whereas Android requires
// one cancel event per touch sequence. Thus we need to allow extra cancel
// events.
return event.GetAction() == MotionEvent::ACTION_DOWN || current_down_event_ ||
event.GetAction() == MotionEvent::ACTION_CANCEL;
}

void GestureProvider::Fling(const MotionEvent& event,
Expand Down Expand Up @@ -672,7 +676,8 @@ void GestureProvider::Send(GestureEventData gesture) {
// are SHOW_PRESS and TAP, potentially triggered by the double-tap
// delay timing out.
DCHECK(current_down_event_ || gesture.type() == ET_GESTURE_TAP ||
gesture.type() == ET_GESTURE_SHOW_PRESS);
gesture.type() == ET_GESTURE_SHOW_PRESS ||
gesture.type() == ET_GESTURE_END);

// TODO(jdduke): Provide a way of skipping this clamping for stylus and/or
// mouse-based input, perhaps by exposing the source type on MotionEvent.
Expand Down Expand Up @@ -782,21 +787,8 @@ void GestureProvider::OnTouchEventHandlingEnd(const MotionEvent& event) {
// |Fling()| will have already signalled an end to touch-scrolling.
EndTouchScrollIfNecessary(event, true);

const gfx::RectF bounding_box = GetBoundingBox(event);

if (gesture_begin_end_types_enabled_) {
for (size_t i = 0; i < event.GetPointerCount(); ++i) {
Send(CreateGesture(ET_GESTURE_END,
event.GetId(),
event.GetEventTime(),
event.GetX(i),
event.GetY(i),
event.GetRawX(i),
event.GetRawY(i),
event.GetPointerCount() - i,
bounding_box));
}
}
if (gesture_begin_end_types_enabled_)
Send(CreateGesture(ET_GESTURE_END, event));

current_down_event_.reset();

Expand Down
Loading

0 comments on commit 709c57e

Please sign in to comment.