In this implementation, detection uses a multistep test. A time test checks that the stroke was not lingering; a circle gesture should be drawn quickly. An inflection test checks that the touch did not change direction too often. A proper circle includes four direction changes; this test allows for five. A convergence test checks that the circle starts and ends close enough together that the points are plausibly related. A fair amount of leeway is needed here because users tend to undershoot or overshoot their starting point when they receive no direct visual feedback. The pixel distance used in this implementation is generous: approximately a third of the window's width.
The final test looks at movement around a central point. It sums the arcs traveled between successive points, which should total 360 degrees (2π radians) for a perfect circle. This example accepts any transit that falls no more than 45 degrees short, for not-quite-finished circles, and no more than 180 degrees over, for circles that continue a bit past their starting point, allowing the finger to travel more naturally. For example, a gesture whose arcs sum to 330 degrees falls only 30 degrees short and passes; one whose arcs sum to 560 degrees overshoots by 200 degrees and fails.
Once these tests pass, the algorithm produces a least-bounding rectangle and centers that rectangle on the geometric mean of the points from the original gesture. This result is assigned to the circle instance variable. It's not a perfect detection system (you can try to fool it when testing the sample code), but it's robust enough to provide reasonably good circle checks for many iOS applications.
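As listed below, testForCircle returns the bounding rectangle directly. For the centering step the paragraph above describes, a sketch like the following would work, treating the "geometric mean" as the points' centroid and reusing the GEO helper functions defined in Recipe 4 below (the function name centerRectOnCentroid is invented for this sketch):

// Sketch only, not part of the recipe: build a rect the same size as
// the least-bounding rect, centered on the centroid of the points.
CGRect centerRectOnCentroid(NSArray *points, CGRect rect)
{
    CGPoint sum = CGPointZero;
    for (NSValue *value in points)
    {
        CGPoint pt = [value CGPointValue];
        sum.x += pt.x;
        sum.y += pt.y;
    }
    CGPoint centroid = CGPointMake(sum.x / points.count,
                                   sum.y / points.count);
    // Rebuild a rect of the same size around the centroid
    return GEORectAroundCenter(centroid, rect.size.width / 2.0f,
                               rect.size.height / 2.0f);
}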
Recipe 4: Detecting Circles.
// Retrieve center of rectangle
CGPoint GEORectGetCenter(CGRect rect)
{
    return CGPointMake(CGRectGetMidX(rect), CGRectGetMidY(rect));
}

// Build rectangle around a given center
CGRect GEORectAroundCenter(CGPoint center, float dx, float dy)
{
    return CGRectMake(center.x - dx, center.y - dy, dx * 2, dy * 2);
}

// Center one rect inside another
CGRect GEORectCenteredInRect(CGRect rect, CGRect mainRect)
{
    CGFloat dx = CGRectGetMidX(mainRect) - CGRectGetMidX(rect);
    CGFloat dy = CGRectGetMidY(mainRect) - CGRectGetMidY(rect);
    return CGRectOffset(rect, dx, dy);
}

// Return the normalized dot product of two vectors
CGFloat dotproduct(CGPoint v1, CGPoint v2)
{
    CGFloat dot = (v1.x * v2.x) + (v1.y * v2.y);
    CGFloat a = ABS(sqrt(v1.x * v1.x + v1.y * v1.y));
    CGFloat b = ABS(sqrt(v2.x * v2.x + v2.y * v2.y));
    dot /= (a * b);
    return dot;
}

// Return distance between two points
CGFloat distance(CGPoint p1, CGPoint p2)
{
    CGFloat dx = p2.x - p1.x;
    CGFloat dy = p2.y - p1.y;
    return sqrt(dx * dx + dy * dy);
}

// Offset in X
CGFloat dx(CGPoint p1, CGPoint p2)
{
    return p2.x - p1.x;
}

// Offset in Y
CGFloat dy(CGPoint p1, CGPoint p2)
{
    return p2.y - p1.y;
}

// Sign of a number
NSInteger sign(CGFloat x)
{
    return (x < 0.0f) ? (-1) : 1;
}

// Return a point with respect to a given origin
CGPoint pointWithOrigin(CGPoint pt, CGPoint origin)
{
    return CGPointMake(pt.x - origin.x, pt.y - origin.y);
}

// Calculate and return least bounding rectangle
#define POINT(_INDEX_) \
    [(NSValue *)[points objectAtIndex:_INDEX_] CGPointValue]

CGRect boundingRect(NSArray *points)
{
    CGRect rect = CGRectZero;
    CGRect ptRect;
    for (int i = 0; i < points.count; i++)
    {
        CGPoint pt = POINT(i);
        ptRect = CGRectMake(pt.x, pt.y, 0.0f, 0.0f);
        rect = (CGRectEqualToRect(rect, CGRectZero)) ?
            ptRect : CGRectUnion(rect, ptRect);
    }
    return rect;
}

CGRect testForCircle(NSArray *points, NSDate *firstTouchDate)
{
    if (points.count < 2)
    {
        NSLog(@"Too few points (2) for circle");
        return CGRectZero;
    }

    // Test 1: Duration tolerance
    float duration = [[NSDate date]
        timeIntervalSinceDate:firstTouchDate];
    NSLog(@"Transit duration: %0.2f", duration);
    float maxDuration = 2.0f;
    if (duration > maxDuration)
    {
        NSLog(@"Excessive duration");
        return CGRectZero;
    }

    // Test 2: Direction changes should be limited to near 4
    int inflections = 0;
    for (int i = 2; i < (points.count - 1); i++)
    {
        float deltx = dx(POINT(i), POINT(i-1));
        float delty = dy(POINT(i), POINT(i-1));
        float px = dx(POINT(i-1), POINT(i-2));
        float py = dy(POINT(i-1), POINT(i-2));
        if ((sign(deltx) != sign(px)) || (sign(delty) != sign(py)))
            inflections++;
    }
    if (inflections > 5)
    {
        NSLog(@"Excessive inflections");
        return CGRectZero;
    }

    // Test 3: Start and end points must be near each other
    float tolerance = [[[UIApplication sharedApplication] keyWindow]
        bounds].size.width / 3.0f;
    if (distance(POINT(0), POINT(points.count - 1)) > tolerance)
    {
        NSLog(@"Start too far from end");
        return CGRectZero;
    }

    // Test 4: Count the distance traveled in degrees.
    CGRect circle = boundingRect(points);
    CGPoint center = GEORectGetCenter(circle);
    float distance = ABS(acos(dotproduct(
        pointWithOrigin(POINT(0), center),
        pointWithOrigin(POINT(1), center))));
    for (int i = 1; i < (points.count - 1); i++)
        distance += ABS(acos(dotproduct(
            pointWithOrigin(POINT(i), center),
            pointWithOrigin(POINT(i+1), center))));

    float transitTolerance = distance - 2 * M_PI;

    if (transitTolerance < 0.0f) // fell short of 2 PI
    {
        if (transitTolerance < -(M_PI / 4.0f)) // under 45 degrees
        {
            NSLog(@"Transit too short");
            return CGRectZero;
        }
    }

    if (transitTolerance > M_PI) // additional 180 degrees
    {
        NSLog(@"Transit too long");
        return CGRectZero;
    }

    return circle;
}
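One practical caveat: floating-point error can push the normalized dot product slightly outside the range [-1, 1], in which case acos returns NaN and poisons the arc sum. A defensive variant (a sketch, not part of the recipe; the helper name clampedAngle is invented here) clamps before converting to an angle:

// Sketch: clamp the normalized dot product into [-1, 1] before acos
// so floating-point drift cannot produce NaN in the arc total
CGFloat clampedAngle(CGPoint v1, CGPoint v2)
{
    CGFloat dot = dotproduct(v1, v2);
    if (dot > 1.0f) dot = 1.0f;
    if (dot < -1.0f) dot = -1.0f;
    return ABS(acos(dot));
}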
Creating a Custom Gesture Recognizer
It takes little work to transform the code shown in Recipe 4 into a custom recognizer, as introduced in Recipe 5. Subclassing UIGestureRecognizer enables you to build your own circle recognizer that you can add to views in your applications.
Start by importing UIGestureRecognizerSubclass.h into your new class. The file declares everything you need your recognizer subclass to override or customize. For each method you override, make sure to call the superclass version of the method before invoking your new code.
Gestures fall into two types: continuous and discrete. The circle recognizer is discrete. It either recognizes a circle or fails. Continuous gestures include pinches and pans, where recognizers send updates throughout their lifecycle. Your recognizer generates updates by setting its state property.
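For contrast with the discrete circle recognizer, here is a minimal sketch (hand-rolled for illustration, not taken from the recipes) of how a continuous recognizer subclass might report progress through its state property; it assumes a UIGestureRecognizer subclass that imports UIGestureRecognizerSubclass.h:

// Sketch of the continuous pattern: move from possible to began on
// the first update, then issue changed updates as touches continue
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [super touchesMoved:touches withEvent:event];
    if (self.state == UIGestureRecognizerStatePossible)
        self.state = UIGestureRecognizerStateBegan;   // first update
    else
        self.state = UIGestureRecognizerStateChanged; // later updates
}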
Recognizers are basically state machines for fingertips. All recognizers start in the possible state (UIGestureRecognizerStatePossible), and continuous gestures then pass through a series of changed states (UIGestureRecognizerStateChanged). Discrete recognizers either succeed in recognizing a gesture (UIGestureRecognizerStateRecognized) or fail (UIGestureRecognizerStateFailed), as demonstrated in Recipe 5. The recognizer sends actions to its target each time you update state, except when the state is set to possible or failed.
The rather long comments you see in Recipe 5 belong to Apple, courtesy of the subclass header file. I've included them here because they help explain the roles of the key overridden methods. The reset method returns the recognizer to its quiescent state, allowing it to prepare itself for its next recognition challenge.
The touchesBegan: (and so on) methods are called at similar points as their UIResponder analogs, enabling you to perform your tests at the same touch lifecycle points. (As an overriding philosophy, gesture recognizers should fail as soon as possible. When they succeed, you should store information about the gesture in local properties. The circle gesture should save any detected circle so users know where the gesture occurred.) The following example waits until the touchesEnded: callback to check for success or failure, and uses the same testForCircle function defined in Recipe 4.
Recipe 5: Creating a Gesture Recognizer Subclass.
#import <UIKit/UIGestureRecognizerSubclass.h>

// Class interface (declared here for completeness): the gathered
// touch points and the time the gesture began
@interface CircleRecognizer : UIGestureRecognizer
{
    NSMutableArray *points;
    NSDate *firstTouchDate;
}
@end

@implementation CircleRecognizer

// called automatically by the runtime after the gesture state has
// been set to UIGestureRecognizerStateEnded; any internal state
// should be reset to prepare for a new attempt to recognize the
// gesture. After this is received, all remaining active touches will
// be ignored (no further updates will be received for touches that
// had already begun but haven't ended)
- (void)reset
{
    [super reset];
    points = nil;
    firstTouchDate = nil;
    self.state = UIGestureRecognizerStatePossible;
}

// mirror of the touch-delivery methods on UIResponder:
// UIGestureRecognizers aren't in the responder chain, but observe
// touches hit-tested to their view and their view's subviews.
// UIGestureRecognizers receive touches before the view to which
// the touch was hit-tested
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    [super touchesBegan:touches withEvent:event];
    if (touches.count > 1)
    {
        self.state = UIGestureRecognizerStateFailed;
        return;
    }
    points = [NSMutableArray array];
    firstTouchDate = [NSDate date];
    UITouch *touch = [touches anyObject];
    [points addObject:[NSValue valueWithCGPoint:
        [touch locationInView:self.view]]];
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [super touchesMoved:touches withEvent:event];
    UITouch *touch = [touches anyObject];
    [points addObject:[NSValue valueWithCGPoint:
        [touch locationInView:self.view]]];
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [super touchesEnded:touches withEvent:event];
    BOOL detectionSuccess = !CGRectEqualToRect(CGRectZero,
        testForCircle(points, firstTouchDate));
    if (detectionSuccess)
        self.state = UIGestureRecognizerStateRecognized;
    else
        self.state = UIGestureRecognizerStateFailed;
}
@end
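Once the subclass exists, clients attach it like any built-in recognizer, using the initWithTarget:action: initializer inherited from UIGestureRecognizer. A brief usage sketch (the handleCircle: action name and the view variable are invented for this example):

// Attach the custom recognizer to a view
CircleRecognizer *circler = [[CircleRecognizer alloc]
    initWithTarget:self action:@selector(handleCircle:)];
[view addGestureRecognizer:circler];

// Called when the recognizer reaches the recognized state
- (void)handleCircle:(CircleRecognizer *)recognizer
{
    NSLog(@"Circle gesture recognized on %@", recognizer.view);
}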
Dragging from a Scroll View
iOS's rich set of gesture recognizers doesn't always accomplish exactly what you're looking for. Here's an example: Imagine a horizontal scrolling view filled with image views, one next to another, so you can scroll left and right to see the entire collection. Now, imagine that you want to be able to drag items out of that view and add them to a space directly below the scrolling area. To do this, you need to recognize downward touches on those child views (that is, orthogonal to the scrolling direction).
This was the puzzle I encountered while trying to help developer Alex Hosgrove, who was trying to build an application roughly equivalent to a set of refrigerator magnet letters. Users could drag those letters down into a workspace and then play with and arrange the items they'd chosen. There were two challenges with this scenario. First, who owned each touch? Second, what happened after the downward touch was recognized?
Both the scroll view and its children own an interest in each touch. A downward gesture should generate new objects; a sideways gesture should pan the scroll view. Touches have to be shared to allow both the scroll view and its children to respond to user interactions. This problem can be solved using gesture delegates.
Gesture delegates allow you to add simultaneous recognition, so that two recognizers can operate at the same time. You add this behavior by conforming to a protocol (UIGestureRecognizerDelegate) and implementing a simple delegate method:
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
    shouldRecognizeSimultaneouslyWithGestureRecognizer:
        (UIGestureRecognizer *)otherGestureRecognizer
{
    return YES;
}
You cannot reassign gesture delegates for scroll views, so you must add this delegate override to the implementation of the scroll view's children.
The second question, converting a swipe into a drag, is addressed by thinking about the entire touch lifetime. Each touch that creates a new object starts as a directional drag but ends up as a pan once the new view is created. A pan recognizer works better here than a swipe recognizer, whose lifetime ends at the point of recognition.
To make this happen, Recipe 6 manually adds that directional-movement detection outside of the built-in gesture detection. In the end, that working-outside-the-box approach provides a major coding win: once the swipe has been detected, the underlying pan gesture recognizer continues to operate. This allows the user to keep moving the swiped object without having to raise his or her finger and retouch the object in question.
This implementation detects swipes that move down at least 16 vertical pixels without straying more than 8 pixels to either side. When this code detects a downward swipe, it adds a new DragView (the same class used earlier in this article) to the screen and allows it to follow the touch for the remainder of the pan gesture interaction.
At the point of recognition, the class marks itself as having handled the swipe (gestureWasHandled) and disables the scroll view for the duration of the panning event. This allows the child complete control over the ongoing pan gesture without the scroll view reacting to further touch movement.
Recipe 6: Dragging Items Out of Scroll Views.
#define DX(p1, p2) (p2.x - p1.x)
#define DY(p1, p2) (p2.y - p1.y)
#define SWIPE_DRAG_MIN 16
#define DRAGLIMIT_MAX 8

// Categorize swipe types
typedef enum {
    TouchUnknown,
    TouchSwipeLeft,
    TouchSwipeRight,
    TouchSwipeUp,
    TouchSwipeDown,
} SwipeTypes;

// Class interface (declared here for completeness): swipe-tracking
// state and the drag view created by a downward swipe
@interface PullView : UIImageView <UIGestureRecognizerDelegate>
{
    BOOL gestureWasHandled;
    int pointCount;
    CGPoint startPoint;
    SwipeTypes touchtype;
    DragView *dv;
}
@end

@implementation PullView

// Create a new view with an embedded pan gesture recognizer
- (id)initWithImage:(UIImage *)anImage
{
    if (self = [super initWithImage:anImage])
    {
        self.userInteractionEnabled = YES;
        UIPanGestureRecognizer *pan = [[[UIPanGestureRecognizer alloc]
            initWithTarget:self action:@selector(handlePan:)]
            autorelease];
        pan.delegate = self;
        self.gestureRecognizers = @[pan];
    }
    return self;
}

// Allow simultaneous recognition
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
    shouldRecognizeSimultaneouslyWithGestureRecognizer:
        (UIGestureRecognizer *)otherGestureRecognizer
{
    return YES;
}

// Handle pans by detecting swipes
- (void)handlePan:(UIPanGestureRecognizer *)uigr
{
    // Only deal with scroll view superviews
    if (![self.superview isKindOfClass:[UIScrollView class]]) return;

    // Extract superviews
    UIView *supersuper = self.superview.superview;
    UIScrollView *scrollView = (UIScrollView *)self.superview;

    // Calculate location of touch
    CGPoint touchLocation = [uigr locationInView:supersuper];

    // Handle touch based on recognizer state
    if (uigr.state == UIGestureRecognizerStateBegan)
    {
        // Initialize recognizer
        gestureWasHandled = NO;
        pointCount = 1;
        startPoint = touchLocation;
    }

    if (uigr.state == UIGestureRecognizerStateChanged)
    {
        pointCount++;

        // Calculate whether a swipe has occurred
        float dx = DX(touchLocation, startPoint);
        float dy = DY(touchLocation, startPoint);

        BOOL finished = YES;
        if ((dx > SWIPE_DRAG_MIN) && (ABS(dy) < DRAGLIMIT_MAX))
            touchtype = TouchSwipeLeft;
        else if ((-dx > SWIPE_DRAG_MIN) && (ABS(dy) < DRAGLIMIT_MAX))
            touchtype = TouchSwipeRight;
        else if ((dy > SWIPE_DRAG_MIN) && (ABS(dx) < DRAGLIMIT_MAX))
            touchtype = TouchSwipeUp;
        else if ((-dy > SWIPE_DRAG_MIN) && (ABS(dx) < DRAGLIMIT_MAX))
            touchtype = TouchSwipeDown;
        else
            finished = NO;

        // If unhandled and a downward swipe, produce a new draggable view
        if (!gestureWasHandled && finished &&
            (touchtype == TouchSwipeDown))
        {
            dv = [[DragView alloc] initWithImage:self.image];
            dv.center = touchLocation;
            [supersuper addSubview:dv];
            scrollView.scrollEnabled = NO;
            gestureWasHandled = YES;
        }
        else if (gestureWasHandled)
        {
            // Allow continued dragging after detection
            dv.center = touchLocation;
        }
    }

    if (uigr.state == UIGestureRecognizerStateEnded)
    {
        // Ensure that the scroll view returns to scrollable
        if (gestureWasHandled)
            scrollView.scrollEnabled = YES;
    }
}
@end
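To round out the picture, here is an illustrative setup sketch (not part of Recipe 6): it fills a horizontal scroll view with PullView instances, one per image. The images array, the 100-point item size, and the frame values are assumptions for this example.

// Sketch: build a horizontally scrolling strip of PullView items
UIScrollView *scrollView = [[UIScrollView alloc]
    initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 100.0f)];
float xOffset = 0.0f;
for (UIImage *image in images)
{
    PullView *pullView = [[PullView alloc] initWithImage:image];
    pullView.frame = CGRectMake(xOffset, 0.0f, 100.0f, 100.0f);
    [scrollView addSubview:pullView];
    xOffset += 100.0f;
}
scrollView.contentSize = CGSizeMake(xOffset, 100.0f);
[self.view addSubview:scrollView];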