一、
Source path: svn trunk/src/com/badlogic/gamedev/samples/MultitouchSample.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118


package com.badlogic.gamedev.samples;

import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLU;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;

import com.badlogic.gamedev.tools.GameActivity;
import com.badlogic.gamedev.tools.GameListener;
import com.badlogic.gamedev.tools.Mesh;
import com.badlogic.gamedev.tools.Mesh.PrimitiveType;

public class MultitouchSample extends GameActivity implements GameListener
{
        Mesh rect;
        int[] touchX = new int[10];
        int[] touchY = new int[10];            
        boolean[] touched = new boolean[10];
       
        public void onCreate( Bundle bundle )
        {
                super.onCreate( bundle );
                setGameListener( this );
        }

        @Override
        public boolean onTouch(View v, MotionEvent event)
        {                      
                int action = event.getAction();
            int ptrId = event.getPointerId(0);
            if(event.getPointerCount() > 1)
                ptrId = (action & MotionEvent.ACTION_POINTER_ID_MASK) >> MotionEvent.ACTION_POINTER_ID_SHIFT;
            action = action & MotionEvent.ACTION_MASK;
            if(action < 7 && action > 4)
                action = action - 5;                      
               
                if( action == MotionEvent.ACTION_DOWN )
                {
                        for( int i = 0; i < event.getPointerCount(); i++ )
                        {
                                float x = event.getX(i);
                            float y = event.getY(i);
                           
                                touchX[event.getPointerId(i)] = (int)x;
                                touchY[event.getPointerId(i)] = (int)y;
                        }                      

                        touched[ptrId] = true;
                        Log.d( "Multitouch", "down, ptr: " + ptrId );
                }
                if( action == MotionEvent.ACTION_MOVE )
                {                                                      
                        for( int i = 0; i < event.getPointerCount(); i++ )
                        {
                                float x = event.getX(i);
                            float y = event.getY(i);
                           
                                touchX[event.getPointerId(i)] = (int)x;
                                touchY[event.getPointerId(i)] = (int)y;                        
                        }                                                                                      
                }
                if( action == MotionEvent.ACTION_UP )
                {
                        touched[ptrId] = false;
                       
                        if( event.getPointerCount() == 1 )
                                for( int i = 0; i < 10; i++ )
                                        touched[i] = false;
                        Log.d( "Multitouch", "up, ptr: " + ptrId );
                }
                if( action == MotionEvent.ACTION_CANCEL )
                {
                        touched[ptrId] = false;
                        if( event.getPointerCount() == 1 )
                        for( int i = 0; i < 10; i++ )
                                touched[i] = false;
                }
               
                return true;
        }
       
        @Override
        public void mainLoopIteration(GameActivity activity, GL10 gl)
        {
                gl.glViewport( 0, 0, activity.getViewportWidth(), activity.getViewportHeight() );
                gl.glClear( GL10.GL_COLOR_BUFFER_BIT );
                gl.glMatrixMode( GL10.GL_PROJECTION );
                gl.glLoadIdentity();
                GLU.gluOrtho2D(gl, 0, activity.getViewportWidth(), 0, activity.getViewportHeight() );
                gl.glMatrixMode( GL10.GL_MODELVIEW );
                gl.glLoadIdentity();
               
                for( int i = 0; i < touched.length; i++ )
                {
                        if( touched[i] )
                        {
                                gl.glPushMatrix();
                                gl.glTranslatef( touchX[i], activity.getViewportHeight() - touchY[i], 0 );
                                rect.render(PrimitiveType.TriangleFan);
                                gl.glPopMatrix();
                        }
                }                      
        }

        @Override
        public void setup(GameActivity activity, GL10 gl)
        {      
                rect = new Mesh( gl, 4, false, false, false );
                rect.vertex( -80, -80, 0 );
                rect.vertex( -80, 80, 0 );
                rect.vertex( 80, 80, 0 );
                rect.vertex( 80, -80, 0 );
               
        }
}


Start here if you haven't read it already, it goes over a number of things dealing with multitouch in Android: http://android-developers.blogspot.com/2010/06/making-sense-of-multitouch.html

A few things about your posted code:

  1. You'll never see ACTION_DOWN with a pointer count of 2. ACTION_DOWN is only sent for the first pointer that goes down. All fingers that touch the screen after the first will send ACTION_POINTER_DOWN.
  2. Don't assume only up to 2 finger touch, there can easily be more.
  3. It's a lot easier to work with the masked action (use MotionEvent#getActionMasked()) than to code for each pointer index individually.
  4. In the end, indices only matter for pulling data out of a MotionEvent. If you're tracking pointer movement over time, use the pointer ID.
  5. Pointer IDs are just numbers. Don't make assumptions as to what values they will have other than that they will be integers from 0 and up.
@Override
    public boolean onTouchEvent(MotionEvent ev) {
        // Let the ScaleGestureDetector inspect all events.
        mScaleDetector.onTouchEvent(ev);

        final int action = ev.getAction();
        switch (action & MotionEvent.ACTION_MASK) {
        case MotionEvent.ACTION_DOWN: {
            final float x = ev.getX();
            final float y = ev.getY();

            mLastTouchX = x;
            mLastTouchY = y;
            mActivePointerId = ev.getPointerId(0);
            break;
        }

        case MotionEvent.ACTION_MOVE: {
            final int pointerIndex = ev.findPointerIndex(mActivePointerId);
            final float x = ev.getX(pointerIndex);
            final float y = ev.getY(pointerIndex);

            // Only move if the ScaleGestureDetector isn't processing a gesture.
            if (!mScaleDetector.isInProgress()) {
                final float dx = x - mLastTouchX;
                final float dy = y - mLastTouchY;

                mPosX += dx;
                mPosY += dy;

                invalidate();
            }

            mLastTouchX = x;
            mLastTouchY = y;

            break;
        }

        case MotionEvent.ACTION_UP: {
            mActivePointerId = INVALID_POINTER_ID;
            break;
        }

        case MotionEvent.ACTION_CANCEL: {
            mActivePointerId = INVALID_POINTER_ID;
            break;
        }

        case MotionEvent.ACTION_POINTER_UP: {
            final int pointerIndex = (ev.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK)
                    >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
            final int pointerId = ev.getPointerId(pointerIndex);
            if (pointerId == mActivePointerId) {
                // This was our active pointer going up. Choose a new
                // active pointer and adjust accordingly.
                final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
                mLastTouchX = ev.getX(newPointerIndex);
                mLastTouchY = ev.getY(newPointerIndex);
                mActivePointerId = ev.getPointerId(newPointerIndex);
            }
            break;
        }
        }

        return true;
    }

    @Override
    public void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        canvas.save();
        Log.d("DEBUG", "X: "+mPosX+" Y: "+mPosY);
        canvas.translate(mPosX, mPosY);
        canvas.scale(mScaleFactor, mScaleFactor);
        mImage.draw(canvas);
        canvas.restore();
    }

二、

Posted by Tim Bray on 09 June 2010
at
8:37 PM



[This post is by Adam Powell, one of our more touchy-feely Android engineers. — Tim Bray]

The
word “multitouch” gets thrown around quite a bit and it’s not always
clear what people are referring to. For some it’s about hardware
capability, for others it refers to specific gesture support in
software. Whatever you decide to call it, today we’re going to look at
how to make your apps and views behave nicely with multiple fingers on
the screen.

This post is going to be heavy on code examples. It
will cover creating a custom View that responds to touch events and
allows the user to manipulate an object drawn within it. To get the most
out of the examples you should be familiar with setting up an Activity and the basics of the Android UI system. Full project source will be linked at the end.

We’ll begin with a new View class that draws an object (our application icon) at a given position:

public class TouchExampleView extends View {
    private Drawable mIcon;
    private float mPosX;
    private float mPosY;
   
    private float mLastTouchX;
    private float mLastTouchY;
   
    public TouchExampleView(Context context) {
        this(context, null, 0);
    }
   
    public TouchExampleView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
   
    public TouchExampleView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        mIcon = context.getResources().getDrawable(R.drawable.icon);
        mIcon.setBounds(0, 0, mIcon.getIntrinsicWidth(), mIcon.getIntrinsicHeight());
    }

    @Override
    public void onDraw(Canvas canvas) {
        super.onDraw(canvas);
       
        canvas.save();
        canvas.translate(mPosX, mPosY);
        mIcon.draw(canvas);
        canvas.restore();
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        // More to come here later...
        return true;
    }
}

MotionEvent

The Android framework’s primary point of access for touch data is the android.view.MotionEvent class. Passed to your views through the onTouchEvent and onInterceptTouchEvent
methods, MotionEvent contains data about “pointers,” or active touch
points on the device’s screen. Through a MotionEvent you can obtain X/Y
coordinates as well as size and pressure for each pointer. MotionEvent.getAction() returns a value describing what kind of motion event occurred.

One
of the more common uses of touch input is letting the user drag an
object around the screen. We can accomplish this in our View class from
above by implementing onTouchEvent as follows:

@Override
public boolean onTouchEvent(MotionEvent ev) {
    final int action = ev.getAction();
    switch (action) {
    case MotionEvent.ACTION_DOWN: {
        final float x = ev.getX();
        final float y = ev.getY();
       
        // Remember where we started
        mLastTouchX = x;
        mLastTouchY = y;
        break;
    }
       
    case MotionEvent.ACTION_MOVE: {
        final float x = ev.getX();
        final float y = ev.getY();
       
        // Calculate the distance moved
        final float dx = x - mLastTouchX;
        final float dy = y - mLastTouchY;
       
        // Move the object
        mPosX += dx;
        mPosY += dy;
       
        // Remember this touch position for the next move event
        mLastTouchX = x;
        mLastTouchY = y;
       
        // Invalidate to request a redraw
        invalidate();
        break;
    }
    }
   
    return true;
}

The
code above has a bug on devices that support multiple pointers. While
dragging the image around the screen, place a second finger on the
touchscreen then lift the first finger. The image jumps! What’s
happening? We’re calculating the distance to move the object based on
the last known position of the default pointer. When the first finger is
lifted, the second finger becomes the default pointer and we have a
large delta between pointer positions which our code dutifully applies
to the object’s location.

If all you want is info about a single pointer’s location, the methods MotionEvent.getX() and MotionEvent.getY()
are all you need. MotionEvent was extended in Android 2.0 (Eclair) to
report data about multiple pointers and new actions were added to
describe multitouch events. MotionEvent.getPointerCount() returns the number of active pointers. getX and getY now accept an index to specify which pointer’s data to retrieve.

Index vs. ID

At
a higher level, touchscreen data from a snapshot in time may not be
immediately useful since touch gestures involve motion over time
spanning many motion events. A pointer index does not necessarily match
up across complex events, it only indicates the data’s position within
the MotionEvent. However this is not work that your app has to do
itself. Each pointer also has an ID mapping that stays persistent across
touch events. You can retrieve this ID for each pointer using MotionEvent.getPointerId(index) and find an index for a pointer ID using MotionEvent.findPointerIndex(id).

Feeling Better?

Let’s fix the example above by taking pointer IDs into account.

private static final int INVALID_POINTER_ID = -1;

// The ‘active pointer’ is the one currently moving our object.
private int mActivePointerId = INVALID_POINTER_ID;

// Existing code ...

@Override
public boolean onTouchEvent(MotionEvent ev) {
    final int action = ev.getAction();
    switch (action & MotionEvent.ACTION_MASK) {
    case MotionEvent.ACTION_DOWN: {
        final float x = ev.getX();
        final float y = ev.getY();
       
        mLastTouchX = x;
        mLastTouchY = y;

        // Save the ID of this pointer
        mActivePointerId = ev.getPointerId(0);
        break;
    }
       
    case MotionEvent.ACTION_MOVE: {
        // Find the index of the active pointer and fetch its position
        final int pointerIndex = ev.findPointerIndex(mActivePointerId);
        final float x = ev.getX(pointerIndex);
        final float y = ev.getY(pointerIndex);
       
        final float dx = x - mLastTouchX;
        final float dy = y - mLastTouchY;
       
        mPosX += dx;
        mPosY += dy;
       
        mLastTouchX = x;
        mLastTouchY = y;
       
        invalidate();
        break;
    }
       
    case MotionEvent.ACTION_UP: {
        mActivePointerId = INVALID_POINTER_ID;
        break;
    }
       
    case MotionEvent.ACTION_CANCEL: {
        mActivePointerId = INVALID_POINTER_ID;
        break;
    }
   
    case MotionEvent.ACTION_POINTER_UP: {
        // Extract the index of the pointer that left the touch sensor
        final int pointerIndex = (action & MotionEvent.ACTION_POINTER_INDEX_MASK)
                >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
        final int pointerId = ev.getPointerId(pointerIndex);
        if (pointerId == mActivePointerId) {
            // This was our active pointer going up. Choose a new
            // active pointer and adjust accordingly.
            final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
            mLastTouchX = ev.getX(newPointerIndex);
            mLastTouchY = ev.getY(newPointerIndex);
            mActivePointerId = ev.getPointerId(newPointerIndex);
        }
        break;
    }
    }
   
    return true;
}

There are a few new elements at work here. We’re switching on action & MotionEvent.ACTION_MASK now rather than just action itself, and we’re using a new MotionEvent action constant, MotionEvent.ACTION_POINTER_UP. ACTION_POINTER_DOWN
and ACTION_POINTER_UP are fired whenever a secondary pointer goes down
or up. If there is already a pointer on the screen and a new one goes
down, you will receive ACTION_POINTER_DOWN instead of ACTION_DOWN. If a
pointer goes up but there is still at least one touching the screen, you
will receive ACTION_POINTER_UP instead of ACTION_UP.

The
ACTION_POINTER_DOWN and ACTION_POINTER_UP events encode extra
information in the action value. ANDing it with MotionEvent.ACTION_MASK
gives us the action constant while ANDing it with ACTION_POINTER_INDEX_MASK
gives us the index of the pointer that went up or down. In the
ACTION_POINTER_UP case our example extracts this index and ensures that
our active pointer ID is not referring to a pointer that is no longer
touching the screen. If it was, we select a different pointer to be
active and save its current X and Y position. Since this saved position
is used in the ACTION_MOVE case to calculate the distance to move the
onscreen object, we will always calculate the distance to move using
data from the correct pointer.

This is all the data that you need
to process any sort of gesture your app may require. However dealing
with this low-level data can be cumbersome when working with more
complex gestures. Enter GestureDetectors.

GestureDetectors

Since
apps can have vastly different needs, Android does not spend time
cooking touch data into higher level events unless you specifically
request it. GestureDetectors are small filter objects that consume
MotionEvents and dispatch higher level gesture events to listeners
specified during their construction. The Android framework provides two
GestureDetectors out of the box, but you should also feel free to use
them as examples for implementing your own if needed. GestureDetectors
are a pattern, not a prepacked solution. They’re not just for complex
gestures such as drawing a star while standing on your head, they can
even make simple gestures like fling or double tap easier to work with.

android.view.GestureDetector
generates gesture events for several common single-pointer gestures
used by Android including scrolling, flinging, and long press. For
Android 2.2 (Froyo) we’ve also added android.view.ScaleGestureDetector for processing the most commonly requested two-finger gesture: pinch zooming.

Gesture
detectors follow the pattern of providing a method public boolean
onTouchEvent(MotionEvent). This method, like its namesake in
android.view.View, returns true if it handles the event and false if it
does not. In the context of a gesture detector, a return value of true
implies that there is an appropriate gesture currently in progress.
GestureDetector and ScaleGestureDetector can be used together when you
want a view to recognize multiple gestures.

To report detected
gesture events, gesture detectors use listener objects passed to their
constructors. ScaleGestureDetector uses ScaleGestureDetector.OnScaleGestureListener. ScaleGestureDetector.SimpleOnScaleGestureListener is offered as a helper class that you can extend if you don’t care about all of the reported events.

Since we are already supporting dragging in our example, let’s add support for scaling. The updated example code is shown below:

private ScaleGestureDetector mScaleDetector;
private float mScaleFactor = 1.f;

// Existing code ...

public TouchExampleView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    mIcon = context.getResources().getDrawable(R.drawable.icon);
    mIcon.setBounds(0, 0, mIcon.getIntrinsicWidth(), mIcon.getIntrinsicHeight());
   
    // Create our ScaleGestureDetector
    mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
}

@Override
public boolean onTouchEvent(MotionEvent ev) {
    // Let the ScaleGestureDetector inspect all events.
    mScaleDetector.onTouchEvent(ev);
   
    final int action = ev.getAction();
    switch (action & MotionEvent.ACTION_MASK) {
    case MotionEvent.ACTION_DOWN: {
        final float x = ev.getX();
        final float y = ev.getY();
       
        mLastTouchX = x;
        mLastTouchY = y;
        mActivePointerId = ev.getPointerId(0);
        break;
    }
       
    case MotionEvent.ACTION_MOVE: {
        final int pointerIndex = ev.findPointerIndex(mActivePointerId);
        final float x = ev.getX(pointerIndex);
        final float y = ev.getY(pointerIndex);

        // Only move if the ScaleGestureDetector isn't processing a gesture.
        if (!mScaleDetector.isInProgress()) {
            final float dx = x - mLastTouchX;
            final float dy = y - mLastTouchY;

            mPosX += dx;
            mPosY += dy;

            invalidate();
        }

        mLastTouchX = x;
        mLastTouchY = y;

        break;
    }
       
    case MotionEvent.ACTION_UP: {
        mActivePointerId = INVALID_POINTER_ID;
        break;
    }
       
    case MotionEvent.ACTION_CANCEL: {
        mActivePointerId = INVALID_POINTER_ID;
        break;
    }
   
    case MotionEvent.ACTION_POINTER_UP: {
        final int pointerIndex = (ev.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK)
                >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
        final int pointerId = ev.getPointerId(pointerIndex);
        if (pointerId == mActivePointerId) {
            // This was our active pointer going up. Choose a new
            // active pointer and adjust accordingly.
            final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
            mLastTouchX = ev.getX(newPointerIndex);
            mLastTouchY = ev.getY(newPointerIndex);
            mActivePointerId = ev.getPointerId(newPointerIndex);
        }
        break;
    }
    }
   
    return true;
}

@Override
public void onDraw(Canvas canvas) {
    super.onDraw(canvas);
   
    canvas.save();
    canvas.translate(mPosX, mPosY);
    canvas.scale(mScaleFactor, mScaleFactor);
    mIcon.draw(canvas);
    canvas.restore();
}

private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        mScaleFactor *= detector.getScaleFactor();
       
        // Don't let the object get too small or too large.
        mScaleFactor = Math.max(0.1f, Math.min(mScaleFactor, 5.0f));

        invalidate();
        return true;
    }
}

This
example merely scratches the surface of what ScaleGestureDetector
offers. The listener methods receive a reference to the detector itself
as a parameter that can be queried for extended information about the
gesture in progress. See the ScaleGestureDetector API documentation for more details.

Now
our example app allows a user to drag with one finger, scale with two,
and it correctly handles passing active pointer focus between fingers as
they contact and leave the screen. You can download the final sample
project at http://code.google.com/p/android-touchexample/. It requires the Android 2.2 SDK (API level 8) to build and a 2.2 (Froyo) powered device to run.

From Example to Application

In
a real app you would want to tweak the details about how zooming
behaves. When zooming, users will expect content to zoom about the focal
point of the gesture as reported by ScaleGestureDetector.getFocusX() and getFocusY(). The specifics of this will vary depending on how your app represents and draws its content.

Different
touchscreen hardware may have different capabilities; some panels may
only support a single pointer, others may support two pointers but with
position data unsuitable for complex gestures, and others may support
precise positioning data for two pointers and beyond. You can query what
type of touchscreen a device has at runtime using PackageManager.hasSystemFeature().

As
you design your user interface keep in mind that people use their
mobile devices in many different ways and not all Android devices are
created equal. Some apps might be used one-handed, making
multiple-finger gestures awkward. Some users prefer using directional
pads or trackballs to navigate. Well-designed gesture support can put
complex functionality at your users’ fingertips, but also consider
designing alternate means of accessing application functionality that
can coexist with gestures.





























Trackbacks




 

Android 2.3 r1 中文API (57) —— ScaleGestureDetector



 




前言.
本章内容是android.view.ScaleGestureDetector,允许Views可以通过提供的MotionEvents检测和处理包括
多点触摸在内的手势变化信息,版本为Android 2.3 r1,翻译来自"一昕",再次感谢"一昕"
!期待你一起参与Android中文API的翻译,联系 ...



Posted by
农民伯伯


at
07 December, 2010 17:19



 

Программирование для Android



 




HelloWorld для Android. установка и настройка ide для android. Android и
сетевые коммуникации. Making Sense of Multitouch. Удержание баланса
между функциональностью и совместимостью при разработке приложения ...



Posted by
artyomr


at
18 August, 2010 12:57



 

How to have your (Cup)cake and eat it too



 




[This post is by Adam Powell, his second touchy-feely outing in just a
few weeks. I asked him to send me a better picture than we ran last
time, and got this in response. Photo by our own Romain Guy. — Tim Bray]
...



Posted by
Tim Bray


at
12 July, 2010 15:29



 

Ressources pédagogiques (et autres) :o)



 




EDD: G8 G20 : Et le climat dans tout ça? Oil from Gulf Spill Could Have
Powered 38000 Cars (and More) for a Year, Researcher Says.
Histoire-Géographie: FEWS NET Food Security Outlook - Most Likely
Scenario, April to September 2010 ...



Posted by
Henri Willox


at
11 June, 2010 03:20



 

Android Engineer Adam Powell Helps You Make Sense of Multitouch