diff --git a/.classpath b/.classpath
new file mode 100644
index 0000000..7bc01d9
--- /dev/null
+++ b/.classpath
@@ -0,0 +1,9 @@
+
+
+ * The box will disappear if the "OK" button is touched, if an area outside the box is + * touched, if the screen is rotated ... doing just about anything makes it disappear. + */ + public static void display(Activity caller) { + String versionStr = getVersionString(caller); + String aboutHeader = caller.getString(R.string.app_name) + " v" + versionStr; + + // Manually inflate the view that will form the body of the dialog. + View aboutView; + try { + aboutView = caller.getLayoutInflater().inflate(R.layout.about_dialog, null); + } catch (InflateException ie) { + Log.e(TAG, "Exception while inflating about box: " + ie.getMessage()); + return; + } + + AlertDialog.Builder builder = new AlertDialog.Builder(caller); + builder.setTitle(aboutHeader); + builder.setIcon(R.drawable.ic_launcher); + builder.setCancelable(true); // implies setCanceledOnTouchOutside + builder.setPositiveButton(R.string.ok, null); + builder.setView(aboutView); + builder.show(); + } +} diff --git a/src/com/android/grafika/CameraCaptureActivity.java b/src/com/android/grafika/CameraCaptureActivity.java new file mode 100644 index 0000000..672cc2e --- /dev/null +++ b/src/com/android/grafika/CameraCaptureActivity.java @@ -0,0 +1,558 @@ +/* + * Copyright 2013 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.android.grafika; + +import android.opengl.EGL14; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.os.Bundle; +import android.os.Handler; +import android.os.Message; +import android.util.Log; +import android.view.View; +import android.widget.Button; +import android.widget.CheckBox; +import android.widget.TextView; +import android.app.Activity; +import android.graphics.SurfaceTexture; +import android.hardware.Camera; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.WeakReference; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +/** + * Shows the camera preview on screen while simultaneously recording it to a .mp4 file. + *
+ * Every time we receive a frame from the camera, we need to: + *
+ * At any given time there are four things in motion: + *
+ * GLSurfaceView is fairly painful here. Ideally we'd create the video encoder, create + * an EGLContext for it, and pass that into GLSurfaceView to share. The API doesn't allow + * this, so we have to do it the other way around. When GLSurfaceView gets torn down + * (say, because we rotated the device), the EGLContext gets tossed, which means that when + * it comes back we have to re-create the EGLContext used by the video encoder. (And, no, + * the "preserve EGLContext on pause" feature doesn't help.) + *
+ * We could simplify this quite a bit by using TextureView instead of GLSurfaceView, but that + * comes with a performance hit. We could also have the renderer thread drive the video + * encoder directly, allowing them to work from a single EGLContext, but it's useful to + * decouple the operations, and it's generally unwise to perform disk I/O on the thread that + * renders your UI. + *
+ * We want to access Camera from the UI thread (setup, teardown) and the renderer thread + * (configure SurfaceTexture, start preview), but the API says you can only access the object + * from a single thread. So we need to pick one thread to own it, and the other thread has to + * access it remotely. Some things are simpler if we let the renderer thread manage it, + * but we'd really like to be sure that Camera is released before we leave onPause(), which + * means we need to make a synchronous call from the UI thread into the renderer thread, which + * we don't really have full control over. It's less scary to have the UI thread own Camera + * and have the renderer call back into the UI thread through the standard Handler mechanism. + *
+ * With three threads working simultaneously (plus Camera causing periodic events as frames + * arrive) we have to be very careful when communicating state changes. In general we want + * to send a message to the thread, rather than directly accessing state in the object. + *
+ * + *
+ * To exercise the API a bit, the video encoder is required to survive Activity restarts. In the + * current implementation it stops recording but doesn't stop time from advancing, so you'll + * see a pause in the video. (We could adjust the timer to make it seamless, or output a + * "paused" message and hold on that in the recording, or leave the Camera running so it + * continues to generate preview frames while the Activity is paused.) The video encoder object + * is managed as a static property of the Activity. + */ +public class CameraCaptureActivity extends Activity + implements SurfaceTexture.OnFrameAvailableListener { + private static final String TAG = MainActivity.TAG; + private static final boolean VERBOSE = false; + + private GLSurfaceView mGLView; + private CameraSurfaceRenderer mRenderer; + private Camera mCamera; + private CameraHandler mCameraHandler; + private boolean mRecordingEnabled; // controls button state + + // this is static so it survives activity restarts + private static TextureMovieEncoder sVideoEncoder = new TextureMovieEncoder(); + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_camera_capture); + + File outputFile = new File(getFilesDir(), "camera-test.mp4"); + TextView fileText = (TextView) findViewById(R.id.cameraOutputFile_text); + fileText.setText(outputFile.toString()); + + // Define a handler that receives camera-control messages from other threads. All calls + // to Camera must be made on the same thread. + mCameraHandler = new CameraHandler(this); + + mRecordingEnabled = sVideoEncoder.isRecording(); + + // Configure the GLSurfaceView. This will start the Renderer thread, with an + // appropriate EGL context. 
+ mGLView = (GLSurfaceView) findViewById(R.id.cameraPreviewSurface); + mGLView.setEGLContextClientVersion(2); // select GLES 2.0 + mRenderer = new CameraSurfaceRenderer(mCameraHandler, sVideoEncoder, outputFile); + mGLView.setRenderer(mRenderer); + mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + + Log.d(TAG, "onCreate complete: " + this); + } + + @Override + public void onResume() { + Log.d(TAG, "onResume -- acquiring camera"); + super.onResume(); + updateControls(); + openCamera(1280, 720); + mGLView.onResume(); + Log.d(TAG, "onResume complete: " + this); + } + + @Override + public void onPause() { + Log.d(TAG, "onPause -- releasing camera"); + super.onPause(); + releaseCamera(); + mGLView.queueEvent(new Runnable() { + @Override public void run() { + // Tell the renderer that it's about to be paused so it can clean up. + mRenderer.notifyPausing(); + } + }); + mGLView.onPause(); + Log.d(TAG, "onPause complete"); + } + + @Override + public void onDestroy() { + Log.d(TAG, "onDestroy"); + super.onDestroy(); + mCameraHandler.invalidateHandler(); // paranoia + } + + /** + * Opens a camera, and attempts to establish preview mode at the specified width and height. + */ + private void openCamera(int desiredWidth, int desiredHeight) { + if (mCamera != null) { + throw new RuntimeException("camera already initialized"); + } + + Camera.CameraInfo info = new Camera.CameraInfo(); + + // Try to find a front-facing camera (e.g. for videoconferencing). 
+ int numCameras = Camera.getNumberOfCameras(); + for (int i = 0; i < numCameras; i++) { + Camera.getCameraInfo(i, info); + if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { + mCamera = Camera.open(i); + break; + } + } + if (mCamera == null) { + Log.d(TAG, "No front-facing camera found; opening default"); + mCamera = Camera.open(); // opens first back-facing camera + } + if (mCamera == null) { + throw new RuntimeException("Unable to open camera"); + } + + Camera.Parameters parms = mCamera.getParameters(); + + choosePreviewSize(parms, desiredWidth, desiredHeight); + // leave the frame rate set to default + mCamera.setParameters(parms); + + int[] fpsRange = new int[2]; + Camera.Size mCameraPreviewSize = parms.getPreviewSize(); + parms.getPreviewFpsRange(fpsRange); + String previewFacts = mCameraPreviewSize.width + "x" + mCameraPreviewSize.height; + if (fpsRange[0] == fpsRange[1]) { + previewFacts += " @" + (fpsRange[0] / 1000.0) + "fps"; + } else { + previewFacts += " @" + (fpsRange[0] / 1000.0) + " - " + (fpsRange[1] / 1000.0) + "fps"; + } + TextView text = (TextView) findViewById(R.id.cameraParams_text); + text.setText(previewFacts); + } + + /** + * Attempts to find a preview size that matches the provided width and height (which + * specify the dimensions of the encoded video). If it fails to find a match it just + * uses the default preview size. + *
+ * TODO: should do a best-fit match. + */ + private static void choosePreviewSize(Camera.Parameters parms, int width, int height) { + // We should make sure that the requested MPEG size is less than the preferred + // size, and has the same aspect ratio. + Camera.Size ppsfv = parms.getPreferredPreviewSizeForVideo(); + if (ppsfv != null) { + Log.d(TAG, "Camera preferred preview size for video is " + + ppsfv.width + "x" + ppsfv.height); + } + + for (Camera.Size size : parms.getSupportedPreviewSizes()) { + if (size.width == width && size.height == height) { + parms.setPreviewSize(width, height); + return; + } + } + + Log.w(TAG, "Unable to set preview size to " + width + "x" + height); + if (ppsfv != null) { + parms.setPreviewSize(ppsfv.width, ppsfv.height); + } + // else use whatever the default size is + } + + /** + * Stops camera preview, and releases the camera to the system. + */ + private void releaseCamera() { + if (mCamera != null) { + mCamera.stopPreview(); + mCamera.release(); + mCamera = null; + Log.d(TAG, "releaseCamera -- done"); + } + } + + /** + * onClick handler for "record" button. + */ + public void clickToggleRecording(View unused) { + mRecordingEnabled = !mRecordingEnabled; + mGLView.queueEvent(new Runnable() { + @Override public void run() { + // notify the renderer that we want to change the encoder's state + mRenderer.changeRecordingState(mRecordingEnabled); + } + }); + updateControls(); + } + + /** + * onClick handler for "rebind" checkbox. + */ + public void clickRebindCheckbox(View unused) { + CheckBox cb = (CheckBox) findViewById(R.id.rebindHack_checkbox); + TextureRender.sWorkAroundContextProblem = cb.isChecked(); + } + + /** + * Updates the on-screen controls to reflect the current state of the app. + */ + private void updateControls() { + Button toggleRelease = (Button) findViewById(R.id.toggleRecording_button); + int id = mRecordingEnabled ? 
+ R.string.toggleRecordingOff : R.string.toggleRecordingOn; + toggleRelease.setText(id); + + CheckBox cb = (CheckBox) findViewById(R.id.rebindHack_checkbox); + cb.setChecked(TextureRender.sWorkAroundContextProblem); + } + + /** + * Connects the SurfaceTexture to the Camera preview output, and starts the preview. + */ + private void handleSetSurfaceTexture(SurfaceTexture st) { + st.setOnFrameAvailableListener(CameraCaptureActivity.this); + try { + mCamera.setPreviewTexture(st); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + mCamera.startPreview(); + } + + @Override + public void onFrameAvailable(SurfaceTexture st) { + // The SurfaceTexture uses this to signal the availability of a new frame. The + // thread that "owns" the external texture associated with the SurfaceTexture (which, + // by virtue of the context being shared, *should* be either one) needs to call + // updateTexImage() to latch the buffer. + // + // Once the buffer is latched, the GLSurfaceView thread can signal the encoder thread. + // This feels backward -- we want recording to be prioritized over rendering -- but + // since recording is only enabled some of the time it's easier to do it this way. + // + // Since GLSurfaceView doesn't establish a Looper, this will *probably* execute on + // the main UI thread. Fortunately, requestRender() can be called from any thread, + // so it doesn't really matter. + if (VERBOSE) Log.d(TAG, "ST onFrameAvailable"); + mGLView.requestRender(); + } + + /** + * Handles camera operation requests from other threads. Necessary because the Camera + * must only be accessed from one thread. + *
+ * The handlers all run on the UI thread.
+ */
+ static class CameraHandler extends Handler {
+ public static final int MSG_SET_SURFACE_TEXTURE = 0;
+
+ private WeakReference
+ * Do not call any methods here directly from another thread -- use the
+ * GLSurfaceView#queueEvent() call.
+ */
+class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
+    private static final String TAG = MainActivity.TAG;
+    private static final boolean VERBOSE = false;
+
+    // Recording state machine values for mRecordingStatus.
+    private static final int RECORDING_OFF = 0;
+    private static final int RECORDING_ON = 1;
+    private static final int RECORDING_RESUMED = 2;
+
+    private CameraCaptureActivity.CameraHandler mCameraHandler;
+    private TextureMovieEncoder mVideoEncoder;
+    private File mOutputFile;
+
+    private TextureRender mTextureRender;
+    private SurfaceTexture mSurfaceTexture;
+    private boolean mRecordingEnabled;
+    private int mRecordingStatus;   // one of the RECORDING_* constants; -1 until onSurfaceCreated()
+    private int mFrameCount;        // drives the flashing recording indicator in onDrawFrame()
+
+
+    /**
+     * Constructs CameraSurfaceRenderer.
+     *
+     * @param cameraHandler Handler for communicating with UI thread
+     * @param movieEncoder video encoder object
+     * @param outputFile output file for encoded video; forwarded to movieEncoder
+     */
+    public CameraSurfaceRenderer(CameraCaptureActivity.CameraHandler cameraHandler,
+            TextureMovieEncoder movieEncoder, File outputFile) {
+        mCameraHandler = cameraHandler;
+        mVideoEncoder = movieEncoder;
+        mOutputFile = outputFile;
+
+        // -1 marks "not yet initialized"; onSurfaceCreated() sets the real state.
+        mRecordingStatus = -1;
+        mRecordingEnabled = false;
+        mFrameCount = -1;
+    }
+
+    /**
+     * Notifies the renderer thread that the activity is pausing.
+     *
+     * For best results, call this *after* disabling Camera preview.
+     */
+    public void notifyPausing() {
+        if (mSurfaceTexture != null) {
+            Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
+            mSurfaceTexture.release();
+            mSurfaceTexture = null;
+        }
+    }
+
+    /**
+     * Notifies the renderer that we want to stop or start recording.
+     */
+    public void changeRecordingState(boolean isRecording) {
+        Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
+        mRecordingEnabled = isRecording;
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+        Log.d(TAG, "onSurfaceCreated");
+
+        // We're starting up or coming back.  Either way we've got a new EGLContext that will
+        // need to be shared with the video encoder, so figure out if a recording is already
+        // in progress.
+        mRecordingEnabled = mVideoEncoder.isRecording();
+        if (mRecordingEnabled) {
+            mRecordingStatus = RECORDING_RESUMED;
+        } else {
+            mRecordingStatus = RECORDING_OFF;
+        }
+
+        mTextureRender = new TextureRender();
+        mTextureRender.surfaceCreated();
+        // (Was a "ZZZ" placeholder log; report the external texture name meaningfully.)
+        Log.d(TAG, "external texture id=" + mTextureRender.getTextureId());
+
+        // Create a SurfaceTexture, with an external texture, in this EGL context.  We don't
+        // have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame
+        // available messages will arrive on the main thread.
+        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+
+        // Tell the UI thread to enable the camera preview.
+        mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
+                CameraCaptureActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 unused, int width, int height) {
+        Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
+    }
+
+    @Override
+    public void onDrawFrame(GL10 unused) {
+        if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureRender.getTextureId());
+        boolean showBox = false;
+
+        // Latch the latest frame.  If there isn't anything new, we'll just re-use whatever
+        // was there before.
+        mSurfaceTexture.updateTexImage();
+
+        // If the recording state is changing, take care of it here.  Ideally we wouldn't
+        // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView
+        // makes it hard to do elsewhere.
+        if (mRecordingEnabled) {
+            switch (mRecordingStatus) {
+                case RECORDING_OFF:
+                    Log.d(TAG, "START recording");
+                    // start recording
+                    mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
+                            mOutputFile, 640, 480, 1000000, EGL14.eglGetCurrentContext()));
+                    mRecordingStatus = RECORDING_ON;
+                    break;
+                case RECORDING_RESUMED:
+                    Log.d(TAG, "RESUME recording");
+                    mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
+                    mRecordingStatus = RECORDING_ON;
+                    break;
+                case RECORDING_ON:
+                    // yay
+                    break;
+                default:
+                    throw new RuntimeException("unknown status " + mRecordingStatus);
+            }
+        } else {
+            switch (mRecordingStatus) {
+                case RECORDING_ON:
+                case RECORDING_RESUMED:
+                    // stop recording
+                    Log.d(TAG, "STOP recording");
+                    mVideoEncoder.stopRecording();
+                    mRecordingStatus = RECORDING_OFF;
+                    break;
+                case RECORDING_OFF:
+                    // yay
+                    break;
+                default:
+                    throw new RuntimeException("unknown status " + mRecordingStatus);
+            }
+        }
+
+        // Set the video encoder's texture name.  We only need to do this once, but in the
+        // current implementation it has to happen after the video encoder is started, so
+        // we just do it here.
+        //
+        // (We should probably be good GLES citizens and delete the initial texture name
+        // generated by TextureRender.  We'd want to do this right after calling startRecording.
+        // Or just be smarter about how TextureRender works with external textures.)
+        //
+        // TODO: be less lame.
+        mVideoEncoder.setTextureId(mTextureRender.getTextureId());
+
+        // This will be ignored if we're not actually recording.
+        mVideoEncoder.frameAvailable(mSurfaceTexture);
+
+        // Draw the video frame.
+        mTextureRender.drawFrame(mSurfaceTexture);
+
+        // Draw a flashing box if we're recording.  This only appears on screen.
+        showBox = (mRecordingStatus == RECORDING_ON);
+        if (showBox && (++mFrameCount & 0x04) == 0) {
+            drawBox();
+        }
+    }
+
+    /**
+     * Draws a red box in the corner.
+     */
+    private void drawBox() {
+        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+        GLES20.glScissor(0, 0, 100, 100);
+        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+    }
+}
diff --git a/src/com/android/grafika/Content.java b/src/com/android/grafika/Content.java
new file mode 100644
index 0000000..97b4540
--- /dev/null
+++ b/src/com/android/grafika/Content.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+/**
+ * Content, managed by ContentManager.
+ * TODO: re-evaluate whether this is useful
+ */
+public interface Content {
+// Marker interface for now -- ContentManager stores items as Content but defines no operations.
+// File getFile();
+}
diff --git a/src/com/android/grafika/ContentManager.java b/src/com/android/grafika/ContentManager.java
new file mode 100644
index 0000000..b9e91ca
--- /dev/null
+++ b/src/com/android/grafika/ContentManager.java
@@ -0,0 +1,258 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.Context;
+import android.os.AsyncTask;
+import android.util.Log;
+import android.widget.ProgressBar;
+import android.widget.TextView;
+
+import java.io.File;
+import java.util.ArrayList;
+
+/**
+ * Manages content generated by the app.
+ *
+ * [ Originally this was going to prepare stuff on demand, but it's easier to just
+ * create it all up front on first launch. ]
+ *
+ * Class is thread-safe.
+ */
+public class ContentManager {
+ private static final String TAG = MainActivity.TAG;
+
+ // Enumerated content tags. These are used as indices into the mContent ArrayList,
+ // so don't make them sparse.
+ // TODO: consider using String tags and a HashMap?
+ public static final int MOVIE_EIGHT_RECTS = 0;
+ public static final int MOVIE_SLIDERS = 1;
+
+ private static final int[] ALL_TAGS = new int[] {
+ MOVIE_EIGHT_RECTS,
+ MOVIE_SLIDERS
+ };
+
+ // Housekeeping.
+ private static final Object sLock = new Object();
+ private static ContentManager sInstance = null;
+
+ private boolean mInitialized = false;
+ private File mFilesDir;
+ private ArrayList
+ * If this returns false, call createAll.
+ */
+ public boolean isContentCreated(Context unused) {
+ // Ideally this would probe each individual item to see if anything needs to be done,
+ // and a subsequent "prepare" call would generate only the necessary items. This
+ // takes a much simpler approach and just checks to see if the files exist. If the
+ // content changes the user will need to force a regen (via a menu option) or wipe data.
+
+ // NOTE(review): the loop index doubles as the content tag, which relies on the
+ // tag constants being dense and zero-based (see ALL_TAGS).
+ for (int i = 0; i < ALL_TAGS.length; i++) {
+ File file = getPath(i);
+ if (!file.canRead()) {
+ Log.d(TAG, "Can't find readable " + file);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Creates all content, overwriting any existing entries.
+ *
+ * Call from main UI thread.
+ */
+ public void createAll(Activity caller) {
+ // Regenerates every known item; prepareContent() shows a progress dialog while working.
+ prepareContent(caller, ALL_TAGS);
+ }
+
+ /**
+ * Prepares the specified content. For example, if the caller requires a movie that doesn't
+ * exist, this will post a progress dialog and generate the movie.
+ *
+ * Call from main UI thread.
+ */
+ public void prepareContent(Activity caller, int[] tags) {
+ // Put up the progress dialog. It is non-cancelable, so the user waits for generation.
+ AlertDialog.Builder builder = WorkDialog.create(caller, R.string.preparing_content);
+ builder.setCancelable(false);
+ AlertDialog dialog = builder.show();
+
+ // Generate content in async task. NOTE(review): presumably GenerateTask dismisses
+ // the dialog when generation completes -- confirm in GenerateTask.
+ GenerateTask genTask = new GenerateTask(dialog, tags);
+ genTask.execute();
+ }
+
+ /**
+ * Returns the specified item.
+ */
+ public Content getContent(int tag) {
+ // mContent doubles as its own lock; writers in prepare() synchronize on it too.
+ synchronized (mContent) {
+ return mContent.get(tag);
+ }
+ }
+
+ /**
+ * Prepares the specified item.
+ *
+ * This may be called from the async task thread.
+ */
+    private void prepare(ProgressUpdater prog, int tag) {
+        // Only the construction differs per tag; generation and publication are
+        // identical, so they are factored out of the switch (the original duplicated
+        // the create/add sequence in every branch).
+        GeneratedMovie movie;
+        switch (tag) {
+            case MOVIE_EIGHT_RECTS:
+                movie = new MovieEightRects();
+                break;
+            case MOVIE_SLIDERS:
+                movie = new MovieSliders();
+                break;
+            default:
+                throw new RuntimeException("Unknown tag " + tag);
+        }
+        // Generate the movie (may be slow; progress is reported through prog), then
+        // publish it into the shared list under the content lock.
+        movie.create(getPath(tag), prog);
+        synchronized (mContent) {
+            mContent.add(tag, movie);
+        }
+    }
+
+ /**
+ * Returns the filename for the tag.
+ */
+ private String getFileName(int tag) {
+ // One fixed filename per tag; add a case here when introducing a new tag.
+ switch (tag) {
+ case MOVIE_EIGHT_RECTS:
+ return "gen-eight-rects.mp4";
+ case MOVIE_SLIDERS:
+ return "gen-sliders.mp4";
+ default:
+ throw new RuntimeException("Unknown tag " + tag);
+ }
+ }
+
+ /**
+ * Returns the storage location for the specified item.
+ */
+ private File getPath(int tag) {
+ // Content lives in the app's private files directory (mFilesDir).
+ return new File(mFilesDir, getFileName(tag));
+ }
+
+ public interface ProgressUpdater {
+ /**
+ * Updates a progress meter.
+ * @param percent Percent completed (0-100).
+ */
+ void updateProgress(int percent);
+ }
+
+ /**
+ * Performs generation of content on an async task thread.
+ */
+ private static class GenerateTask extends AsyncTask
+ * @param sharedContext The context to share, or null if sharing is not desired.
+ * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
+ */
+    public EglCore(EGLContext sharedContext, int flags) {
+        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("EGL already set up");
+        }
+
+        if (sharedContext == null) {
+            sharedContext = EGL14.EGL_NO_CONTEXT;
+        }
+
+        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+            throw new RuntimeException("unable to get EGL14 display");
+        }
+        int[] version = new int[2];
+        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+            // Reset to the "not set up" sentinel on failure.  (Was assigned "null",
+            // which no other method compares against -- release() and makeCurrent()
+            // both test EGL14.EGL_NO_DISPLAY.)
+            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+            throw new RuntimeException("unable to initialize EGL14");
+        }
+
+        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
+        // doesn't really help.  It can also lead to a huge performance hit on glReadPixels()
+        // when reading into a GL_RGBA buffer.
+        int[] attribList = {
+                EGL14.EGL_RED_SIZE, 8,
+                EGL14.EGL_GREEN_SIZE, 8,
+                EGL14.EGL_BLUE_SIZE, 8,
+                EGL14.EGL_ALPHA_SIZE, 8,
+                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+                EGL14.EGL_NONE, 0,      // placeholder for recordable [@-3]
+                EGL14.EGL_NONE
+        };
+        if ((flags & FLAG_RECORDABLE) != 0) {
+            // Overwrite the placeholder pair with the "recordable" attribute.
+            attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
+            attribList[attribList.length - 2] = 1;
+        }
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] numConfigs = new int[1];
+        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+                numConfigs, 0)) {
+            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+        }
+        mEGLConfig = configs[0];
+
+        mEGLContext = null;
+        if ((flags & FLAG_TRY_GLES3) != 0) {
+            // If requested, try to configure context for OpenGL ES 3.x.  Note that
+            // contexts with different client versions can't share state.
+            mGlVersion = 3;
+            int[] attrib3_list = {
+                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
+                    EGL14.EGL_NONE
+            };
+            mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig, sharedContext,
+                    attrib3_list, 0);
+            if (EGL14.eglGetError() != EGL14.EGL_SUCCESS) {
+                // GLES3 attempt failed; fall through to the GLES2 path below.
+                Log.d(TAG, "GLES 3.x not available");
+                mEGLContext = null;
+            }
+        }
+        if (mEGLContext == null) {
+            mGlVersion = 2;
+            int[] attrib2_list = {
+                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+                    EGL14.EGL_NONE
+            };
+            mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig, sharedContext,
+                    attrib2_list, 0);
+        }
+        checkEglError("eglCreateContext");
+        if (mEGLContext == null) {
+            throw new RuntimeException("null context");
+        }
+    }
+
+ /**
+ * Discard all resources held by this class, notably the EGL context.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ // Android is unusual in that it uses a reference-counted EGLDisplay. So for
+ // every eglInitialize() we need an eglTerminate().
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ // Reset fields to their "not set up" sentinels so a second release() is a no-op.
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLConfig = null;
+ }
+
+ /**
+ * Destroys the specified surface. Note the surface won't actually be destroyed if it's
+ * still current in a context.
+ */
+ public void releaseSurface(EGLSurface eglSurface) {
+ // Caller must not use the EGLSurface afterward; see class comment on current-context caveat.
+ EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Creates an EGL surface associated with a Surface.
+ *
+ * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
+ */
+    public EGLSurface createWindowSurface(Object surface) {
+        // Only the two surface flavors EGL14 can wrap are accepted.
+        boolean supported = (surface instanceof Surface) || (surface instanceof SurfaceTexture);
+        if (!supported) {
+            throw new RuntimeException("invalid surface: " + surface);
+        }
+
+        // No special attributes are needed; attach directly to the supplied surface.
+        int[] attribs = { EGL14.EGL_NONE };
+        EGLSurface result = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
+                attribs, 0);
+        checkEglError("eglCreateWindowSurface");
+        if (result == null) {
+            throw new RuntimeException("surface was null");
+        }
+        return result;
+    }
+
+ /**
+ * Creates an EGL surface associated with an offscreen buffer.
+ */
+    public EGLSurface createOffscreenSurface(int width, int height) {
+        // Pbuffer dimensions are fixed at creation time.
+        int[] attribs = {
+                EGL14.EGL_WIDTH, width,
+                EGL14.EGL_HEIGHT, height,
+                EGL14.EGL_NONE
+        };
+        EGLSurface result = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, attribs, 0);
+        checkEglError("eglCreatePbufferSurface");
+        if (result == null) {
+            throw new RuntimeException("surface was null");
+        }
+        return result;
+    }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ // called makeCurrent() before create?
+ Log.d(TAG, "NOTE: makeCurrent w/o display");
+ }
+ // Bind the same surface for both draw and read.
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Makes no context current.
+ */
+ public void makeNothingCurrent() {
+ // Detach the context and both surfaces from the calling thread.
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers(EGLSurface eglSurface) {
+ // Returns EGL's own success/failure result; no exception is thrown on failure.
+ return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
+ // Timestamp is in nanoseconds, as required by eglPresentationTimeANDROID.
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
+ }
+
+ /**
+ * Returns true if our context and the specified surface are current.
+ */
+    public boolean isCurrent(EGLSurface eglSurface) {
+        // Both the context and the draw surface must match the calling thread's bindings.
+        if (!mEGLContext.equals(EGL14.eglGetCurrentContext())) {
+            return false;
+        }
+        return eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
+    }
+
+ /**
+ * Performs a simple surface query.
+ */
+ public int querySurface(EGLSurface eglSurface, int what) {
+ // "what" is an EGL attribute token, e.g. EGL14.EGL_WIDTH.
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
+ return value[0];
+ }
+
+ /**
+ * Returns the GLES version this context is configured for (2 or 3).
+ */
+ public int getGlVersion() {
+ // Set once in the constructor: 3 if a GLES3 context was created, otherwise 2.
+ return mGlVersion;
+ }
+
+ /**
+ * Writes the current display, context, and surface to the log.
+ */
+    public static void logCurrent(String msg) {
+        // Snapshot the calling thread's current EGL bindings and write them to the log.
+        EGLDisplay display = EGL14.eglGetCurrentDisplay();
+        EGLContext context = EGL14.eglGetCurrentContext();
+        EGLSurface surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
+        Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
+                ", surface=" + surface);
+    }
+
+ /**
+ * Checks for EGL errors.
+ */
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
diff --git a/src/com/android/grafika/EglSurfaceBase.java b/src/com/android/grafika/EglSurfaceBase.java
new file mode 100644
index 0000000..d2e868b
--- /dev/null
+++ b/src/com/android/grafika/EglSurfaceBase.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.graphics.Bitmap;
+import android.opengl.EGL14;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Common base class for EGL surfaces.
+ *
+ * There can be multiple surfaces associated with a single context.
+ */
+public class EglSurfaceBase {
+ protected static final String TAG = MainActivity.TAG;
+
+ // EglBase object we're associated with. It may be associated with multiple surfaces.
+ protected EglCore mEglBase;
+
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+ private int mWidth = -1;
+ private int mHeight = -1;
+
+ protected EglSurfaceBase(EglCore eglBase) {
+ mEglBase = eglBase;
+ }
+
+ /**
+ * Creates a window surface.
+ *
+ * @param surface May be a Surface or SurfaceTexture.
+ */
+ public void createWindowSurface(Object surface) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglBase.createWindowSurface(surface);
+ mWidth = mEglBase.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+ mHeight = mEglBase.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+ }
+
+ /**
+ * Creates an off-screen surface.
+ */
+ public void createOffscreenSurface(int width, int height) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglBase.createOffscreenSurface(width, height);
+ mWidth = width;
+ mHeight = height;
+ }
+
+ /**
+ * Returns the surface's width, in pixels.
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Returns the surface's height, in pixels.
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Release the EGL surface.
+ */
+ public void releaseEglSurface() {
+ mEglBase.releaseSurface(mEGLSurface);
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+ mWidth = mHeight = -1;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ mEglBase.makeCurrent(mEGLSurface);
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers() {
+ return mEglBase.swapBuffers(mEGLSurface);
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL.
+ *
+ * @param nsecs Timestamp, in nanoseconds.
+ */
+ public void setPresentationTime(long nsecs) {
+ mEglBase.setPresentationTime(mEGLSurface, nsecs);
+ }
+
+ /**
+ * Saves the EGL surface to a file.
+ *
+ * Expects that this object's EGL surface is current.
+ */
+ public void saveFrame(File file) throws IOException {
+ if (!mEglBase.isCurrent(mEGLSurface)) {
+ throw new RuntimeException("Expected EGL context/surface is not current");
+ }
+
+ // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
+ // data (i.e. a byte of red, followed by a byte of green...). We need an int[] filled
+ // with little-endian ARGB data to feed to Bitmap.
+ //
+ // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
+ // copying data around for a 720p frame. It's better to do a bulk get() and then
+ // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms
+ // for a trivial frame.)
+ //
+ // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
+ // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data.
+ // Swapping B and R gives us ARGB.
+ //
+ // Making this even more interesting is the upside-down nature of GL, which means
+ // our output will look upside-down relative to what appears on screen if the
+ // typical GL conventions are used.
+
+ String filename = file.toString();
+
+ ByteBuffer buf = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ GLES20.glReadPixels(0, 0, mWidth, mHeight,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+ buf.rewind();
+
+ int pixelCount = mWidth * mHeight;
+ int[] colors = new int[pixelCount];
+ buf.asIntBuffer().get(colors);
+ for (int i = 0; i < pixelCount; i++) {
+ int c = colors[i];
+ colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16);
+ }
+
+ BufferedOutputStream bos = null;
+ try {
+ bos = new BufferedOutputStream(new FileOutputStream(filename));
+ Bitmap bmp = Bitmap.createBitmap(colors, mWidth, mHeight, Bitmap.Config.ARGB_8888);
+ bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
+ bmp.recycle();
+ } finally {
+ if (bos != null) bos.close();
+ }
+ Log.d(TAG, "Saved " + mWidth + "x" + mHeight + " frame as '" + filename + "'");
+ }
+}
diff --git a/src/com/android/grafika/FileUtils.java b/src/com/android/grafika/FileUtils.java
new file mode 100644
index 0000000..62747e0
--- /dev/null
+++ b/src/com/android/grafika/FileUtils.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.Arrays;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Some handy utilities.
+ */
+public class FileUtils {
+ private static final String TAG = MainActivity.TAG;
+
+ private FileUtils() {}
+
+ /**
+ * Obtains a list of files that live in the specified directory and match the glob pattern.
+ */
+ public static String[] getFiles(File dir, String glob) {
+ String regex = globToRegex(glob);
+ final Pattern pattern = Pattern.compile(regex);
+ String[] result = dir.list(new FilenameFilter() {
+ @Override public boolean accept(File dir, String name) {
+ Matcher matcher = pattern.matcher(name);
+ return matcher.matches();
+ }
+ });
+ Arrays.sort(result);
+
+ return result;
+ }
+
+ /**
+ * Converts a filename globbing pattern to a regular expression.
+ *
+ * The regex is suitable for use by Matcher.matches(), which matches the entire string, so
+ * we don't specify leading '^' or trailing '$'.
+ */
+ private static String globToRegex(String glob) {
+ // Quick, overly-simplistic implementation -- just want to handle something simple
+ // like "*.mp4".
+ //
+ // See e.g. http://stackoverflow.com/questions/1247772/ for a more thorough treatment.
+ StringBuilder regex = new StringBuilder(glob.length());
+ //regex.append('^');
+ for (char ch : glob.toCharArray()) {
+ switch (ch) {
+ case '*':
+ regex.append(".*");
+ break;
+ case '?':
+ regex.append('.');
+ break;
+ case '.':
+ regex.append("\\.");
+ break;
+ default:
+ regex.append(ch);
+ break;
+ }
+ }
+ //regex.append('$');
+ return regex.toString();
+ }
+}
diff --git a/src/com/android/grafika/GeneratedMovie.java b/src/com/android/grafika/GeneratedMovie.java
new file mode 100644
index 0000000..bbfe027
--- /dev/null
+++ b/src/com/android/grafika/GeneratedMovie.java
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.util.Log;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Base class for generated movies.
+ */
+public abstract class GeneratedMovie implements Content {
+ private static final String TAG = MainActivity.TAG;
+ private static final boolean VERBOSE = false;
+
+ private static final int IFRAME_INTERVAL = 5;
+
+ // set by sub-class to indicate that the movie has been generated
+ // TODO: remove this now?
+ protected boolean mMovieReady = false;
+
+ // "live" state during recording
+ private MediaCodec.BufferInfo mBufferInfo;
+ private MediaCodec mEncoder;
+ private MediaMuxer mMuxer;
+ private EglCore mEglCore;
+ private WindowSurface mInputSurface;
+ private int mTrackIndex;
+ private boolean mMuxerStarted;
+
+ /**
+ * Creates the movie content. Usually called from an async task thread.
+ */
+ public abstract void create(File outputFile, ContentManager.ProgressUpdater prog);
+
+ /**
+ * Prepares the video encoder, muxer, and an EGL input surface.
+ */
+ protected void prepareEncoder(String mimeType, int width, int height, int bitRate,
+ int framesPerSecond, File outputFile) {
+ mBufferInfo = new MediaCodec.BufferInfo();
+
+ MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
+
+ // Set some properties. Failing to specify some of these can cause the MediaCodec
+ // configure() call to throw an unhelpful exception.
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, framesPerSecond);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+ if (VERBOSE) Log.d(TAG, "format: " + format);
+
+ // Create a MediaCodec encoder, and configure it with our format. Get a Surface
+ // we can use for input and wrap it with a class that handles the EGL work.
+ mEncoder = MediaCodec.createEncoderByType(mimeType);
+ mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+ mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
+ mInputSurface = new WindowSurface(mEglCore, mEncoder.createInputSurface());
+ mInputSurface.makeCurrent();
+ mEncoder.start();
+
+ // Create a MediaMuxer. We can't add the video track and start() the muxer here,
+ // because our MediaFormat doesn't have the Magic Goodies. These can only be
+ // obtained from the encoder after it has started processing data.
+ //
+ // We're not actually interested in multiplexing audio. We just want to convert
+ // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
+ try {
+ if (VERBOSE) Log.d(TAG, "output will go to " + outputFile);
+ mMuxer = new MediaMuxer(outputFile.toString(),
+ MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+ } catch (IOException ioe) {
+ throw new RuntimeException("MediaMuxer creation failed", ioe);
+ }
+
+ mTrackIndex = -1;
+ mMuxerStarted = false;
+ }
+
+ /**
+ * Releases encoder resources. May be called after partial / failed initialization.
+ */
+    protected void releaseEncoder() {
+        if (VERBOSE) Log.d(TAG, "releasing encoder objects");
+        if (mEncoder != null) {
+            mEncoder.stop();
+            mEncoder.release();
+            mEncoder = null;
+        }
+        if (mInputSurface != null) {
+            mInputSurface.release();
+            mInputSurface = null;
+        }
+        if (mEglCore != null) {
+            mEglCore.release();
+            mEglCore = null;
+        }
+        if (mMuxer != null) {
+            // stop() throws IllegalStateException if start() was never called,
+            if (mMuxerStarted) mMuxer.stop();  // which happens on failed/partial init.
+            mMuxer.release();
+            mMuxer = null;
+        }
+    }
+
+ /**
+ * Submits a frame to the encoder.
+ *
+ * @param presentationTimeNsec The presentation time stamp, in nanoseconds.
+ */
+ protected void submitFrame(long presentationTimeNsec) {
+ // The eglSwapBuffers call will block if the input is full, which would be bad if
+ // it stayed full until we dequeued an output buffer (which we can't do, since we're
+ // stuck here). So long as the caller fully drains the encoder before supplying
+ // additional input, the system guarantees that we can supply another frame
+ // without blocking.
+ mInputSurface.setPresentationTime(presentationTimeNsec);
+ mInputSurface.swapBuffers();
+ }
+
+ /**
+ * Extracts all pending data from the encoder.
+ *
+ * If endOfStream is not set, this returns when there is no more data to drain. If it
+ * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
+ * Calling this with endOfStream set should be done once, right before stopping the muxer.
+ */
+ protected void drainEncoder(boolean endOfStream) {
+ final int TIMEOUT_USEC = 10000;
+ if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
+
+ if (endOfStream) {
+ if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
+ mEncoder.signalEndOfInputStream();
+ }
+
+ ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
+ while (true) {
+ int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
+ if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ // no output available yet
+ if (!endOfStream) {
+ break; // out of while
+ } else {
+ if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
+ }
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ // not expected for an encoder
+ encoderOutputBuffers = mEncoder.getOutputBuffers();
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // should happen before receiving buffers, and should only happen once
+ if (mMuxerStarted) {
+ throw new RuntimeException("format changed twice");
+ }
+ MediaFormat newFormat = mEncoder.getOutputFormat();
+ Log.d(TAG, "encoder output format changed: " + newFormat);
+
+ // now that we have the Magic Goodies, start the muxer
+ mTrackIndex = mMuxer.addTrack(newFormat);
+ mMuxer.start();
+ mMuxerStarted = true;
+ } else if (encoderStatus < 0) {
+ Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
+ encoderStatus);
+ // let's ignore it
+ } else {
+ ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
+ if (encodedData == null) {
+ throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
+ " was null");
+ }
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ // The codec config data was pulled out and fed to the muxer when we got
+ // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
+ if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
+ mBufferInfo.size = 0;
+ }
+
+ if (mBufferInfo.size != 0) {
+ if (!mMuxerStarted) {
+ throw new RuntimeException("muxer hasn't started");
+ }
+
+ // adjust the ByteBuffer values to match BufferInfo (not needed?)
+ encodedData.position(mBufferInfo.offset);
+ encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
+
+ mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
+ if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
+ }
+
+ mEncoder.releaseOutputBuffer(encoderStatus, false);
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (!endOfStream) {
+ Log.w(TAG, "reached end of stream unexpectedly");
+ } else {
+ if (VERBOSE) Log.d(TAG, "end of stream reached");
+ }
+ break; // out of while
+ }
+ }
+ }
+ }
+}
diff --git a/src/com/android/grafika/LiveCameraActivity.java b/src/com/android/grafika/LiveCameraActivity.java
new file mode 100644
index 0000000..4db6664
--- /dev/null
+++ b/src/com/android/grafika/LiveCameraActivity.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.TextureView;
+
+import java.io.IOException;
+
+/**
+ * More or less straight out of TextureView's doc.
+ *
+ * TODO: add options for different display sizes, frame rates, camera selection, etc.
+ */
+public class LiveCameraActivity extends Activity implements TextureView.SurfaceTextureListener {
+ private static final String TAG = MainActivity.TAG;
+
+ private Camera mCamera;
+ private TextureView mTextureView;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ mTextureView = new TextureView(this);
+ mTextureView.setSurfaceTextureListener(this);
+
+ setContentView(mTextureView);
+ }
+
+    @Override
+    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+        mCamera = Camera.open();
+        if (mCamera == null) {
+            // Seeing this on Nexus 7 2012 -- I guess it wants a rear-facing camera, but
+            // there isn't one.  TODO: fix
+            throw new RuntimeException("Default camera not available");
+        }
+
+        try {
+            mCamera.setPreviewTexture(surface);
+            mCamera.startPreview();
+        } catch (IOException ioe) {
+            // Don't swallow the failure silently -- at minimum record what went wrong.
+            Log.e(TAG, "Failed to start camera preview", ioe);
+        }
+    }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+ // Ignored, Camera does all the work for us
+ }
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+ mCamera.stopPreview();
+ mCamera.release();
+ return true;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+ // Invoked every time there's a new Camera preview frame
+ //Log.d(TAG, "updated, ts=" + surface.getTimestamp());
+ }
+}
diff --git a/src/com/android/grafika/MainActivity.java b/src/com/android/grafika/MainActivity.java
new file mode 100644
index 0000000..e5a9d3b
--- /dev/null
+++ b/src/com/android/grafika/MainActivity.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.os.Bundle;
+import android.app.Activity;
+import android.content.Intent;
+import android.util.Log;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Spinner;
+import android.widget.AdapterView.OnItemSelectedListener;
+
+/**
+ * Main activity -- entry point from Launcher.
+ */
+public class MainActivity extends Activity implements OnItemSelectedListener {
+ public static final String TAG = "Grafika";
+ public static final Class[] TEST_ACTIVITIES = {
+ // The content and order MUST match the "test_names" string-array.
+ CameraCaptureActivity.class,
+ PlayMovieActivity.class,
+ ReadPixelsActivity.class,
+ TextureViewGLActivity.class,
+ LiveCameraActivity.class,
+ RecordFBOActivity.class,
+ };
+
+ private int mSelectedTest = 0;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+
+ // One-time singleton initialization; requires activity context to get file location.
+ ContentManager.initialize(this);
+
+ // Populate test-selection spinner.
+ Spinner spinner = (Spinner) findViewById(R.id.selectTest_spinner);
+ // Need to create one of these fancy ArrayAdapter thingies, and specify the generic layout
+ // for the widget itself.
+ ArrayAdapter
+ * To add a little flavor, the timing of the frames speeds up as the movie continues.
+ */
+public class MovieEightRects extends GeneratedMovie {
+ private static final String TAG = MainActivity.TAG;
+
+ private static final String MIME_TYPE = "video/avc";
+ private static final int WIDTH = 320;
+ private static final int HEIGHT = 240;
+ private static final int BIT_RATE = 2000000;
+ private static final int NUM_FRAMES = 32;
+ private static final int FRAMES_PER_SECOND = 30;
+
+ // RGB color values for generated frames
+ private static final int TEST_R0 = 0;
+ private static final int TEST_G0 = 136;
+ private static final int TEST_B0 = 0;
+ private static final int TEST_R1 = 236;
+ private static final int TEST_G1 = 50;
+ private static final int TEST_B1 = 186;
+
+ @Override
+ public void create(File outputFile, ContentManager.ProgressUpdater prog) {
+ if (mMovieReady) {
+ throw new RuntimeException("Already created");
+ }
+
+ try {
+ prepareEncoder(MIME_TYPE, WIDTH, HEIGHT, BIT_RATE, FRAMES_PER_SECOND, outputFile);
+
+ for (int i = 0; i < NUM_FRAMES; i++) {
+ // Drain any data from the encoder into the muxer.
+ drainEncoder(false);
+
+ // Generate a frame and submit it.
+ generateFrame(i);
+ submitFrame(computePresentationTimeNsec(i));
+
+ prog.updateProgress(i * 100 / NUM_FRAMES);
+ }
+
+ // Send end-of-stream and drain remaining output.
+ drainEncoder(true);
+ } finally {
+ releaseEncoder();
+ }
+
+ Log.d(TAG, "MovieEightRects complete: " + outputFile);
+ mMovieReady = true;
+ }
+
+    /**
+     * Computes the presentation time for a frame, in nanoseconds.  The pacing
+     * accelerates as the movie continues:
+     *
+     * first 8 frames at 8 fps, next 8 at 16 fps, the rest at 30 fps.
+     */
+ private static long computePresentationTimeNsec(int frameIndex) {
+ final long ONE_BILLION = 1000000000;
+ long time;
+ if (frameIndex < 8) {
+ // 8 fps
+ return frameIndex * ONE_BILLION / 8;
+ } else {
+ time = ONE_BILLION;
+ frameIndex -= 8;
+ }
+ if (frameIndex < 8) {
+ return time + frameIndex * ONE_BILLION / 16;
+ } else {
+ time += ONE_BILLION / 2;
+ frameIndex -= 8;
+ }
+ return time + frameIndex * ONE_BILLION / 30;
+ }
+}
diff --git a/src/com/android/grafika/MoviePlayer.java b/src/com/android/grafika/MoviePlayer.java
new file mode 100644
index 0000000..4776e43
--- /dev/null
+++ b/src/com/android/grafika/MoviePlayer.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+import android.view.Surface;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+
+/**
+ * Plays the video track from a movie file to a Surface.
+ *
+ * TODO: needs more advanced shuttle controls (pause/resume, skip)
+ */
+public class MoviePlayer {
+ private static final String TAG = MainActivity.TAG;
+ private static final boolean VERBOSE = false;
+
+ // Declare this here to reduce allocations.
+ private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
+
+ private File mSourceFile;
+ private Surface mOutputSurface;
+ private boolean mLoop;
+
+ public MoviePlayer(File sourceFile, Surface outputSurface) {
+ mSourceFile = sourceFile;
+ mOutputSurface = outputSurface;
+ }
+
+ /**
+ * Callback invoked when rendering video frames. The MoviePlayer client must
+ * provide one of these.
+ */
+ public interface FrameCallback {
+ /**
+ * Called immediately before the frame is rendered.
+ * @param presentationTimeUsec The desired presentation time, in microseconds.
+ */
+ void preRender(long presentationTimeUsec);
+
+ /**
+ * Called immediately after the frame render call returns. The frame may not have
+ * actually been rendered yet.
+ * TODO: is this actually useful?
+ */
+ void postRender();
+
+ /**
+ * Called after the last frame of a looped movie has been rendered. This allows the
+ * callback to adjust its expectations of the next presentation time stamp.
+ */
+ void loopReset();
+
+ /**
+ * Called between frames to see if we want to stop playback.
+ */
+ boolean isStopRequested();
+ }
+
+ /**
+ * Sets the loop mode. If true, playback will loop forever.
+ */
+ public void setLoopMode(boolean loopMode) {
+ mLoop = loopMode;
+ }
+
+ /**
+ * Decodes the video stream, sending frames to the surface.
+ */
+ public void play(FrameCallback frameCallback) throws IOException {
+ MediaExtractor extractor = null;
+ MediaCodec decoder = null;
+
+ // The MediaExtractor error messages aren't very useful. Check to see if the input
+ // file exists so we can throw a better one if it's not there.
+ if (!mSourceFile.canRead()) {
+ throw new FileNotFoundException("Unable to read " + mSourceFile);
+ }
+
+ try {
+ extractor = new MediaExtractor();
+ extractor.setDataSource(mSourceFile.toString());
+ int trackIndex = selectTrack(extractor);
+ if (trackIndex < 0) {
+ throw new RuntimeException("No video track found in " + mSourceFile);
+ }
+ extractor.selectTrack(trackIndex);
+
+ MediaFormat format = extractor.getTrackFormat(trackIndex);
+ if (VERBOSE) {
+ Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
+ format.getInteger(MediaFormat.KEY_HEIGHT));
+ }
+
+ // Create a MediaCodec decoder, and configure it with the MediaFormat from the
+ // extractor. It's very important to use the format from the extractor because
+ // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ decoder = MediaCodec.createDecoderByType(mime);
+ decoder.configure(format, mOutputSurface, null, 0);
+ decoder.start();
+
+ doExtract(extractor, trackIndex, decoder, frameCallback);
+ } finally {
+ // release everything we grabbed
+ if (decoder != null) {
+ decoder.stop();
+ decoder.release();
+ decoder = null;
+ }
+ if (extractor != null) {
+ extractor.release();
+ extractor = null;
+ }
+ }
+ }
+
+ /**
+ * Selects the video track, if any.
+ *
+ * @return the track index, or -1 if no video track is found.
+ */
+ private static int selectTrack(MediaExtractor extractor) {
+ // Select the first video track we find, ignore the rest.
+ int numTracks = extractor.getTrackCount();
+ for (int i = 0; i < numTracks; i++) {
+ MediaFormat format = extractor.getTrackFormat(i);
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ if (mime.startsWith("video/")) {
+ if (VERBOSE) {
+ Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
+ }
+ return i;
+ }
+ }
+
+ return -1;
+ }
+
+ /**
+ * Work loop.
+ */
+ private void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
+ FrameCallback frameCallback) {
+ final int TIMEOUT_USEC = 10000;
+ ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
+ int inputChunk = 0;
+
+ boolean outputDone = false;
+ boolean inputDone = false;
+ while (!outputDone) {
+ if (VERBOSE) Log.d(TAG, "loop");
+ if (frameCallback.isStopRequested()) {
+ Log.d(TAG, "Stop requested");
+ return;
+ }
+
+ // Feed more data to the decoder.
+ if (!inputDone) {
+ int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
+ if (inputBufIndex >= 0) {
+ ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
+ // Read the sample data into the ByteBuffer. This neither respects nor
+ // updates inputBuf's position, limit, etc.
+ int chunkSize = extractor.readSampleData(inputBuf, 0);
+ if (chunkSize < 0) {
+ // End of stream -- send empty frame with EOS flag set.
+ decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
+ MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ inputDone = true;
+ if (VERBOSE) Log.d(TAG, "sent input EOS");
+ } else {
+ if (extractor.getSampleTrackIndex() != trackIndex) {
+ Log.w(TAG, "WEIRD: got sample from track " +
+ extractor.getSampleTrackIndex() + ", expected " + trackIndex);
+ }
+ long presentationTimeUs = extractor.getSampleTime();
+ decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
+ presentationTimeUs, 0 /*flags*/);
+ if (VERBOSE) {
+ Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
+ chunkSize);
+ }
+ inputChunk++;
+ extractor.advance();
+ }
+ } else {
+ if (VERBOSE) Log.d(TAG, "input buffer not available");
+ }
+ }
+
+ if (!outputDone) {
+ int decoderStatus = decoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
+ if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ // no output available yet
+ if (VERBOSE) Log.d(TAG, "no output from decoder available");
+ } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ // not important for us, since we're using Surface
+ if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
+ } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ MediaFormat newFormat = decoder.getOutputFormat();
+ if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
+ } else if (decoderStatus < 0) {
+ throw new RuntimeException(
+ "unexpected result from decoder.dequeueOutputBuffer: " +
+ decoderStatus);
+ } else { // decoderStatus >= 0
+ boolean doLoop = false;
+ if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
+ " (size=" + mBufferInfo.size + ")");
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (VERBOSE) Log.d(TAG, "output EOS");
+ if (mLoop) {
+ doLoop = true;
+ } else {
+ outputDone = true;
+ }
+ }
+
+ boolean doRender = (mBufferInfo.size != 0);
+
+ // As soon as we call releaseOutputBuffer, the buffer will be forwarded
+ // to SurfaceTexture to convert to a texture. We can't control when it
+ // appears on-screen, but we can manage the pace at which we release
+ // the buffers.
+ if (doRender && frameCallback != null) {
+ frameCallback.preRender(mBufferInfo.presentationTimeUs);
+ }
+ decoder.releaseOutputBuffer(decoderStatus, doRender);
+ if (doRender && frameCallback != null) {
+ frameCallback.postRender();
+ }
+
+ if (doLoop) {
+ Log.d(TAG, "Reached EOS, looping");
+ extractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ inputDone = false;
+ decoder.flush(); // reset decoder state
+ frameCallback.loopReset();
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/com/android/grafika/MovieSliders.java b/src/com/android/grafika/MovieSliders.java
new file mode 100644
index 0000000..feaf50b
--- /dev/null
+++ b/src/com/android/grafika/MovieSliders.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.File;
+
+/**
+ * Generates a simple movie, featuring two small rectangles that slide across the screen.
+ */
+public class MovieSliders extends GeneratedMovie {
+ private static final String TAG = MainActivity.TAG;
+
+ private static final String MIME_TYPE = "video/avc";
+ private static final int WIDTH = 640;
+ private static final int HEIGHT = 480;
+ private static final int BIT_RATE = 5000000;
+ private static final int FRAMES_PER_SECOND = 30;
+
+ @Override
+ public void create(File outputFile, ContentManager.ProgressUpdater prog) {
+ if (mMovieReady) {
+ throw new RuntimeException("Already created");
+ }
+
+ final int NUM_FRAMES = 240;
+
+ try {
+ prepareEncoder(MIME_TYPE, WIDTH, HEIGHT, BIT_RATE, FRAMES_PER_SECOND, outputFile);
+
+ for (int i = 0; i < NUM_FRAMES; i++) {
+ // Drain any data from the encoder into the muxer.
+ drainEncoder(false);
+
+ // Generate a frame and submit it.
+ generateFrame(i);
+ submitFrame(computePresentationTimeNsec(i));
+
+ prog.updateProgress(i * 100 / NUM_FRAMES);
+ }
+
+ // Send end-of-stream and drain remaining output.
+ drainEncoder(true);
+ } finally {
+ releaseEncoder();
+ }
+
+ Log.d(TAG, "MovieEightRects complete: " + outputFile);
+ mMovieReady = true;
+ }
+
+ /**
+ * Generates a frame of data using GL commands.
+ */
+ private void generateFrame(int frameIndex) {
+ final int BOX_SIZE = 80;
+ frameIndex %= 240;
+ int xpos, ypos;
+
+ int absIndex = Math.abs(frameIndex - 120);
+ xpos = absIndex * WIDTH / 120;
+ ypos = absIndex * HEIGHT / 120;
+
+ float lumaf = absIndex / 120.0f;
+
+ GLES20.glClearColor(lumaf, lumaf, lumaf, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+ GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+ GLES20.glScissor(BOX_SIZE / 2, ypos, BOX_SIZE, BOX_SIZE);
+ GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glScissor(xpos, BOX_SIZE / 2, BOX_SIZE, BOX_SIZE);
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+ }
+
+ /**
+ * Generates the presentation time for frame N, in nanoseconds. Fixed frame rate.
+ */
+ private static long computePresentationTimeNsec(int frameIndex) {
+ final long ONE_BILLION = 1000000000;
+ return frameIndex * ONE_BILLION / FRAMES_PER_SECOND;
+ }
+}
diff --git a/src/com/android/grafika/OffscreenSurface.java b/src/com/android/grafika/OffscreenSurface.java
new file mode 100644
index 0000000..81e5563
--- /dev/null
+++ b/src/com/android/grafika/OffscreenSurface.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+/**
+ * Off-screen EGL surface (pbuffer).
+ *
+ * It's good practice to explicitly release() the surface, preferably from a "finally" block.
+ */
+public class OffscreenSurface extends EglSurfaceBase {
+ /**
+ * Creates an off-screen surface with the specified width and height.
+ */
+ public OffscreenSurface(EglCore eglBase, int width, int height) {
+ super(eglBase);
+ createOffscreenSurface(width, height);
+ }
+
+ /**
+ * Releases any resources associated with the surface.
+ */
+ public void release() {
+ releaseEglSurface();
+ }
+}
diff --git a/src/com/android/grafika/PlayMovieActivity.java b/src/com/android/grafika/PlayMovieActivity.java
new file mode 100644
index 0000000..fafeee1
--- /dev/null
+++ b/src/com/android/grafika/PlayMovieActivity.java
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.CheckBox;
+import android.widget.Spinner;
+import android.widget.AdapterView.OnItemSelectedListener;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Play a movie from a file on disk. Output goes to a TextureView.
+ *
+ * Currently video-only.
+ */
+public class PlayMovieActivity extends Activity implements OnItemSelectedListener {
+ private static final String TAG = MainActivity.TAG;
+
+ private TextureView mTextureView;
+ private String[] mMovieFiles;
+ private int mSelectedMovie;
+ private boolean mShowStopLabel;
+ private PlayMovieTask mPlayTask;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_play_movie);
+
+ mTextureView = (TextureView) findViewById(R.id.movie_texture_view);
+
+ // Populate file-selection spinner.
+ Spinner spinner = (Spinner) findViewById(R.id.playMovieFile_spinner);
+ // Need to create one of these fancy ArrayAdapter thingies, and specify the generic layout
+ // for the widget itself.
+ mMovieFiles = FileUtils.getFiles(getFilesDir(), "*.mp4");
+ ArrayAdapter
+ * The goal here is to play back frames at the original rate. This is done by introducing
+ * a pause before the frame is submitted to the renderer.
+ */
public class SpeedControlCallback implements MoviePlayer.FrameCallback {
    private static final String TAG = MainActivity.TAG;

    // Values for mState.
    private static final int RUNNING = 0;
    private static final int STOPPING = 1;
    // Microseconds per second.
    private static final long ONE_MILLION = 1000000L;

    // volatile: written by requestStop() from an arbitrary thread, read on the playback thread.
    private volatile int mState = RUNNING;
    // Presentation time (from the stream) of the previously rendered frame, in usec.
    private long mPrevPresentUsec;
    // Monotonic clock time at which the previous frame was released, in usec.
    private long mPrevMonoUsec;
    // If nonzero, overrides the stream's timestamps with a fixed per-frame duration.
    private long mFixedFrameDurationUsec;
    // Set by loopReset(); tells preRender() the PTS delta across the loop seam is meaningless.
    private boolean mLoopReset;

    /**
     * Sets a fixed playback rate. If set, this will ignore the presentation time stamp
     * in the video file. Must be called before playback thread starts.
     */
    public void setFixedPlaybackRate(int fps) {
        // NOTE(review): fps == 0 would cause a divide-by-zero here -- callers must pass
        // a positive value.
        mFixedFrameDurationUsec = ONE_MILLION / fps;
    }

    /**
     * Requests an immediate stop to playback. May be called from an arbitrary thread.
     */
    public void requestStop() {
        mState = STOPPING;
    }

    /** Returns true once requestStop() has been called. Safe from any thread. */
    @Override
    public boolean isStopRequested() {
        return mState != RUNNING;
    }

    /**
     * Paces playback: sleeps until the wall-clock moment this frame should be shown,
     * based on the delta between successive presentation timestamps (or the fixed rate).
     */
    @Override
    public void preRender(long presentationTimeUsec) {
        // For the first frame, we grab the presentation time from the video
        // and the current monotonic clock time. For subsequent frames, we
        // sleep for a bit to try to ensure that we're rendering frames at the
        // pace dictated by the video stream.
        //
        // If the frame rate is faster than vsync we should be dropping frames. On
        // Android 4.4 this may not be happening.

        if (mPrevMonoUsec == 0) {
            // Latch current values, then return immediately.
            mPrevMonoUsec = System.nanoTime() / 1000;
            mPrevPresentUsec = presentationTimeUsec;
        } else {
            // Compute the desired time delta between the previous frame and this frame.
            long frameDelta;
            if (mLoopReset) {
                // We don't get an indication of how long the last frame should appear
                // on-screen, so we just throw a reasonable value in. We could probably
                // do better by using a previous frame duration or some sort of average;
                // for now we just use 30fps.
                mPrevPresentUsec = presentationTimeUsec - ONE_MILLION / 30;
                mLoopReset = false;
            }
            if (mFixedFrameDurationUsec != 0) {
                // Caller requested a fixed frame rate. Ignore PTS.
                frameDelta = mFixedFrameDurationUsec;
            } else {
                frameDelta = presentationTimeUsec - mPrevPresentUsec;
            }
            if (frameDelta < 0) {
                Log.w(TAG, "Weird, video times went backward");
                frameDelta = 0;
            } else if (frameDelta == 0) {
                // This suggests a possible bug in movie generation.
                Log.i(TAG, "Warning: current frame and previous frame had same timestamp");
            } else if (frameDelta > 10 * ONE_MILLION) {
                // Inter-frame times could be arbitrarily long. For this player, we want
                // to alert the developer that their movie might have issues (maybe they
                // accidentally output timestamps in nsec rather than usec).
                Log.i(TAG, "Inter-frame pause was " + (frameDelta / ONE_MILLION) +
                        "sec, capping at 5 sec");
                frameDelta = 5 * ONE_MILLION;
            }

            long desiredUsec = mPrevMonoUsec + frameDelta; // when we want to wake up
            long nowUsec = System.nanoTime() / 1000;
            while (nowUsec < (desiredUsec - 100) && mState == RUNNING) {
                // Sleep until it's time to wake up. To be responsive to "stop" commands
                // we're going to wake up every half a second even if the sleep is supposed
                // to be longer (which should be rare). The alternative would be
                // to interrupt the thread, but that requires more work.
                //
                // The precision of the sleep call varies widely from one device to another;
                // we may wake early or late. Different devices will have a minimum possible
                // sleep time. If we're within 100us of the target time, we'll probably
                // overshoot if we try to sleep, so just go ahead and continue on.
                long sleepTimeUsec = desiredUsec - nowUsec;
                if (sleepTimeUsec > 500000) {
                    sleepTimeUsec = 500000;
                }
                try {
                    // Split the usec sleep into millis + nanos for Thread.sleep.
                    Thread.sleep(sleepTimeUsec / 1000, (int) (sleepTimeUsec % 1000) * 1000);
                } catch (InterruptedException ie) {}
                nowUsec = System.nanoTime() / 1000;
            }

            // Advance times using calculated time values, not the post-sleep monotonic
            // clock time, to avoid drifting.
            mPrevMonoUsec += frameDelta;
            mPrevPresentUsec += frameDelta;
        }
    }

    /** No post-render pacing needed. */
    @Override public void postRender() {}

    /**
     * Notes that the movie looped back to the start; the next preRender() call will
     * synthesize a plausible frame duration instead of trusting the PTS delta.
     */
    @Override
    public void loopReset() {
        mLoopReset = true;
    }
}
diff --git a/src/com/android/grafika/TextureMovieEncoder.java b/src/com/android/grafika/TextureMovieEncoder.java
new file mode 100644
index 0000000..e804251
--- /dev/null
+++ b/src/com/android/grafika/TextureMovieEncoder.java
@@ -0,0 +1,508 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.opengl.EGLContext;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+
+/**
+ * Encode a movie from frames rendered from an external texture image.
+ *
+ * The object wraps an encoder running on a dedicated thread. The various control messages
+ * may be sent from arbitrary threads (typically the app UI thread).
+ */
+public class TextureMovieEncoder implements Runnable {
+ private static final String TAG = MainActivity.TAG;
+ private static final boolean VERBOSE = false;
+
+ private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
+ private static final int FRAME_RATE = 30; // 30fps
+ private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
+
+ private static final int MSG_START_RECORDING = 0;
+ private static final int MSG_STOP_RECORDING = 1;
+ private static final int MSG_FRAME_AVAILABLE = 2;
+ private static final int MSG_SET_TEXTURE_ID = 3;
+ private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
+ private static final int MSG_QUIT = 5;
+
+ // ----- accessed by encoder thread -----
+ private EglCore mEglBase;
+ private WindowSurface mInputWindowSurface;
+ private MediaMuxer mMuxer;
+ private MediaCodec mEncoder;
+ private MediaCodec.BufferInfo mBufferInfo;
+ private int mTrackIndex;
+ private boolean mMuxerStarted;
+ private TextureRender mTextureRender;
+ private int mTextureId;
+ private int mFrameNum;
+
+ // ----- accessed by multiple threads -----
+ private volatile EncoderHandler mHandler;
+
+ private Object mReadyFence = new Object(); // guards ready/running
+ private boolean mReady;
+ private boolean mRunning;
+
+
+ /**
+ * Encoder configuration.
+ *
+ * Object is immutable, which means we can safely pass it between threads (and don't need
+ * to worry about it getting tweaked out from under us).
+ */
+ public static class EncoderConfig {
+ final File mOutputFile;
+ final int mWidth;
+ final int mHeight;
+ final int mBitRate;
+ final EGLContext mEglContext;
+
+ public EncoderConfig(File outputFile, int width, int height, int bitRate,
+ EGLContext sharedEglContext) {
+ mOutputFile = outputFile;
+ mWidth = width;
+ mHeight = height;
+ mBitRate = bitRate;
+ mEglContext = sharedEglContext;
+ }
+
+ @Override
+ public String toString() {
+ return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
+ " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
+ }
+ }
+
    /**
     * Tells the video recorder to start recording. (Call from UI or camera thread.)
     *
     * Returns after the recorder thread has started and is ready to accept Messages. The
     * encoder may not yet be fully configured.
     *
     * @param config immutable encoder settings, handed to the encoder thread via Message
     */
    public void startRecording(EncoderConfig config) {
        Log.d(TAG, "Encoder: startRecording()");
        synchronized (mReadyFence) {
            if (mRunning) {
                Log.w(TAG, "Encoder thread already running");
                return;
            }
            mRunning = true;
            new Thread(this, "TextureMovieEncoder").start();
            // Block until run() has created the Handler on the new thread; until mReady
            // is set we have no Handler to post messages to.
            while (!mReady) {
                try {
                    mReadyFence.wait();
                } catch (InterruptedException ie) {
                    // ignore -- keep waiting for the encoder thread to come up
                }
            }
        }

        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
    }
+
+ /**
+ * Tells the video recorder to stop recording. (Call from UI or camera thread.)
+ *
+ * Returns immediately; the encoder/muxer may not yet be finished creating the movie.
+ *
+ * TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
+ * so we can provide reasonable status UI (and let the caller know that movie encoding
+ * has completed).
+ */
+ public void stopRecording() {
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
+ // We don't know when these will actually finish (or even start). We don't want to
+ // delay the UI thread though, so we return immediately.
+ }
+
+ /**
+ * Returns true if recording has been started.
+ */
+ public boolean isRecording() {
+ synchronized (mReadyFence) {
+ return mRunning;
+ }
+ }
+
+ /**
+ * Tells the video recorder to refresh its EGL surface. (Call from UI or camera thread.)
+ */
+ public void updateSharedContext(EGLContext sharedContext) {
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
+ }
+
+ /**
+ * Tells the video recorder that a new frame is available. (Call from UI or camera thread.)
+ */
+ public void frameAvailable(SurfaceTexture st) {
+ synchronized (mReadyFence) {
+ if (!mReady) {
+ return;
+ }
+ }
+
+ float[] transform = new float[16]; // TODO - avoid alloc every frame
+ st.getTransformMatrix(transform);
+ long timestamp = st.getTimestamp();
+ if (timestamp == 0) {
+ // Seeing this after device is toggled off/on with power button. The
+ // first frame back has a zero timestamp.
+ //
+ // MPEG4Writer thinks this is cause to abort() in native code, so it's very
+ // important that we just ignore the frame.
+ Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
+ return;
+ }
+
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
+ (int) (timestamp >> 32), (int) timestamp, transform));
+ }
+
+ /**
+ * Tells the video recorder what texture name to use. This is the external texture that
+ * we're receiving camera previews in. (Call from camera preview thread.)
+ *
+ * TODO: do something less clumsy
+ */
+ public void setTextureId(int id) {
+ synchronized (mReadyFence) {
+ if (!mReady) {
+ return;
+ }
+ }
+ mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
+ }
+
    /**
     * Encoder thread entry point. Establishes Looper/Handler and waits for messages.
     *
     * @see java.lang.Thread#run()
     */
    @Override
    public void run() {
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mReadyFence) {
            mHandler = new EncoderHandler(this);
            mReady = true;
            // Wake the thread blocked in startRecording()'s wait loop.
            mReadyFence.notify();
        }
        // Blocks here dispatching messages until MSG_QUIT makes the looper exit.
        Looper.loop();

        Log.d(TAG, "Encoder thread exiting");
        synchronized (mReadyFence) {
            // Reset state so a later startRecording() can spin up a fresh thread.
            mReady = mRunning = false;
            mHandler = null;
        }
    }
+
+ /**
+ * Handles encoder state change requests.
+ */
+ private static class EncoderHandler extends Handler {
+ private WeakReference
+ * If endOfStream is not set, this returns when there is no more data to drain. If it
+ * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
+ * Calling this with endOfStream set should be done once, right before stopping the muxer.
+ *
+ * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
+ * not recording audio.
+ */
    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     *
     * @param endOfStream if true, signals EOS to the encoder and loops until the
     *     encoder's EOS flag comes back on the output side; if false, returns as soon
     *     as no more output is immediately available
     */
    private void drainEncoder(boolean endOfStream) {
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break; // out of while
                } else {
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                // encoderStatus is an index into the output buffer array.
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) {
                        Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                                mBufferInfo.presentationTimeUs);
                    }
                }

                // Return the buffer to the codec so it can be reused.
                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break; // out of while
                }
            }
        }
    }
+
    /**
     * Draws a box, with position offset.
     *
     * Clears a 100x100 magenta square whose x position advances with {@code posn},
     * wrapping before the box would run off the right edge of the surface.
     */
    private void drawBox(int posn) {
        final int width = mInputWindowSurface.getWidth();
        int xpos = (posn * 4) % (width - 50);
        // Restrict the clear to the box region via scissoring.
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(xpos, 0, 100, 100);
        GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
    }
+}
diff --git a/src/com/android/grafika/TextureRender.java b/src/com/android/grafika/TextureRender.java
new file mode 100644
index 0000000..fe24bee
--- /dev/null
+++ b/src/com/android/grafika/TextureRender.java
@@ -0,0 +1,304 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Code for rendering an external texture image onto a surface using OpenGL ES 2.0.
+ */
+public class TextureRender {
+ private static final String TAG = MainActivity.TAG;
+
+ // Toggle this to show the "rebind" bug.
+ public volatile static boolean sWorkAroundContextProblem = true;
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mTriangleVertices;
+
+ private static final String VERTEX_SHADER =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" + // highp here doesn't seem to matter
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+
+ private int mProgram;
+ private int mTextureId = -12345;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ public TextureRender() {
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ /**
+ * Returns the name of the external texture that will be used during rendering.
+ */
+ public int getTextureId() {
+ return mTextureId;
+ }
+
+ /**
+ * Changes the name of the external texture that will be used during rendering. The
+ * previous name will be deleted.
+ *
+ * Does not delete the previous texture name.
+ */
+ public void setTextureId(int textureId) {
+ if (mTextureId != textureId) {
+ Log.d(TAG, "Changing textureId from " + mTextureId + " to " + textureId);
+ mTextureId = textureId;
+ }
+ }
+
+ /**
+ * Draws the external texture onto the current EGL surface, using the transform matrix
+ * from surfaceTexture.
+ */
+ public void drawFrame(SurfaceTexture surfaceTexture) {
+ surfaceTexture.getTransformMatrix(mSTMatrix);
+ drawFrame(false);
+ }
+
+ /**
+ * Draws the external texture onto the current EGL surface, inverted, using the transform
+ * matrix from surfaceTexture.
+ */
+ public void drawFrameInverted(SurfaceTexture surfaceTexture) {
+ surfaceTexture.getTransformMatrix(mSTMatrix);
+ drawFrame(true);
+ }
+
+ /**
+ * Draws the external texture onto the current EGL surface, using the provided transform
+ * matrix.
+ */
+ public void drawFrame(float[] transformMatrix) {
+ System.arraycopy(transformMatrix, 0, mSTMatrix, 0, mSTMatrix.length);
+ drawFrame(false);
+ }
+
+ /**
+ * Internal frame draw; configure mSTMatrix before calling.
+ */
+ private void drawFrame(boolean invert) {
+ checkGlError("onDrawFrame start");
+ if (invert) {
+ mSTMatrix[5] = -mSTMatrix[5];
+ mSTMatrix[13] = 1.0f - mSTMatrix[13];
+ }
+
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ if (sWorkAroundContextProblem) {
+ // IMPORTANT: on some devices, if you are sharing the external texture between two
+ // contexts, one context may not see updates to the texture unless you un-bind and
+ // re-bind it. If you're not using shared EGL contexts, you don't need to bind
+ // texture 0 here.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ }
+
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ public void surfaceCreated() {
+ mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ checkGlError("glGenTextures");
+
+ mTextureId = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
+ checkGlError("glBindTexture mTextureID");
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+
+ /**
+ * Replaces the fragment shader. Pass in null to reset to default.
+ */
+ public void changeFragmentShader(String fragmentShader) {
+ if (fragmentShader == null) {
+ fragmentShader = FRAGMENT_SHADER;
+ }
+ GLES20.glDeleteProgram(mProgram);
+ mProgram = createProgram(VERTEX_SHADER, fragmentShader);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ Log.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+
+ public static void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+}
diff --git a/src/com/android/grafika/TextureViewGLActivity.java b/src/com/android/grafika/TextureViewGLActivity.java
new file mode 100644
index 0000000..2d73e63
--- /dev/null
+++ b/src/com/android/grafika/TextureViewGLActivity.java
@@ -0,0 +1,275 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.opengl.GLES20;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.TextureView;
+import android.view.View;
+import android.widget.Button;
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+
+/**
+ * Simple demonstration of using GLES to draw on a TextureView.
+ *
+ * Unlike GLSurfaceView, TextureView doesn't manage the EGL config or renderer thread, so we
+ * take care of that ourselves.
+ *
+ * Currently renders frames as fast as possible, without waiting for the consumer.
+ */
+public class TextureViewGLActivity extends Activity {
+    private static final String TAG = MainActivity.TAG;
+
+    // Experiment with allowing TextureView to release the SurfaceTexture from the callback vs.
+    // releasing it explicitly ourselves from the draw loop. The latter seems to be problematic
+    // in 4.4 (KK) -- set the flag to "false", rotate the screen a few times, then check the
+    // output of "adb shell ps -t | grep `pid grafika`".
+    //
+    // Must be static or it'll get reset on every Activity pause/resume.
+    private static volatile boolean sReleaseInCallback = true;
+
+    private TextureView mTextureView;
+    private Renderer mRenderer;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        Log.d(TAG, "onCreate");
+        super.onCreate(savedInstanceState);
+
+        // Start up the Renderer thread. It'll sleep until the TextureView is ready.
+        mRenderer = new Renderer();
+        mRenderer.start();
+
+        setContentView(R.layout.activity_texture_view_gl);
+        mTextureView = (TextureView) findViewById(R.id.glTextureView);
+        mTextureView.setSurfaceTextureListener(mRenderer);
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        updateControls();
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.d(TAG, "onDestroy");
+        super.onDestroy();
+        mRenderer.halt();
+    }
+
+    /**
+     * Updates the UI elements to match current state.
+     */
+    private void updateControls() {
+        Button toggleRelease = (Button) findViewById(R.id.toggleRelease_button);
+        int id = sReleaseInCallback ?
+                R.string.toggleReleaseCallbackOff : R.string.toggleReleaseCallbackOn;
+        toggleRelease.setText(id);
+    }
+
+    /**
+     * onClick handler for toggleRelease_button.
+     */
+    public void clickToggleRelease(View unused) {
+        sReleaseInCallback = !sReleaseInCallback;
+        updateControls();
+    }
+
+    /**
+     * Handles GL rendering and SurfaceTexture callbacks.
+     *
+     * We don't create a Looper, so the SurfaceTexture-by-way-of-TextureView callbacks
+     * happen on the UI thread.
+     */
+    private static class Renderer extends Thread implements TextureView.SurfaceTextureListener {
+        private Object mLock = new Object();        // guards mSurfaceTexture, mDone
+        private SurfaceTexture mSurfaceTexture;
+        private boolean mDone;
+
+        public Renderer() {
+            super("TextureViewGL Renderer");
+        }
+
+        @Override
+        public void run() {
+            while (true) {
+                SurfaceTexture surfaceTexture = null;
+
+                // Latch the SurfaceTexture when it becomes available. We have to wait for
+                // the TextureView to create it.
+                synchronized (mLock) {
+                    while (!mDone && (surfaceTexture = mSurfaceTexture) == null) {
+                        try {
+                            mLock.wait();
+                        } catch (InterruptedException ie) {
+                            throw new RuntimeException(ie);     // not expected
+                        }
+                    }
+                    if (mDone) {
+                        break;
+                    }
+                }
+                Log.d(TAG, "Got surfaceTexture=" + surfaceTexture);
+
+                // Create an EGL surface for our new SurfaceTexture. We're not on the same
+                // thread as the SurfaceTexture, which is a concern for the *consumer*, which
+                // wants to call updateTexImage(). Because we're the *producer*, i.e. the
+                // one generating the frames, we don't need to worry about being on the same
+                // thread.
+                //
+                // Use the latched local reference here, NOT the mSurfaceTexture field: the
+                // field can be nulled out at any moment by onSurfaceTextureDestroyed() on
+                // the UI thread, and this read would be unsynchronized.
+                EglCore eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
+                WindowSurface windowSurface = new WindowSurface(eglCore, surfaceTexture);
+                windowSurface.makeCurrent();
+
+                // Render frames until we're told to stop or the SurfaceTexture is destroyed.
+                doAnimation(windowSurface);
+
+                windowSurface.release();
+                eglCore.release();
+                if (!sReleaseInCallback) {
+                    Log.i(TAG, "Releasing SurfaceTexture in renderer thread");
+                    surfaceTexture.release();
+                }
+            }
+
+            Log.d(TAG, "Renderer thread exiting");
+        }
+
+        /**
+         * Draws updates as fast as the system will allow.
+         *
+         * In 4.4, with the synchronous queue, the frame rate will be limited. In previous
+         * releases, with the async queue, most of the frames we render will be dropped.
+         *
+         * The correct thing to do here is use Choreographer to schedule frame updates off
+         * of vsync, but that's not nearly as much fun.
+         */
+        private void doAnimation(WindowSurface eglSurface) {
+            final int BLOCK_WIDTH = 80;
+            final int BLOCK_SPEED = 2;
+            float clearColor = 0.0f;
+            int xpos = -BLOCK_WIDTH / 2;
+            int xdir = BLOCK_SPEED;
+            int width = eglSurface.getWidth();
+            int height = eglSurface.getHeight();
+
+            Log.d(TAG, "Animating " + width + "x" + height + " EGL surface");
+
+            while (true) {
+                // Check to see if the TextureView's SurfaceTexture is still valid.
+                SurfaceTexture surfaceTexture;
+                synchronized (mLock) {
+                    surfaceTexture = mSurfaceTexture;
+                    if (surfaceTexture == null) {
+                        Log.d(TAG, "doAnimation exiting");
+                        return;
+                    }
+                }
+
+                // Still alive, render a frame.
+                GLES20.glClearColor(clearColor, clearColor, clearColor, 1.0f);
+                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+                GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+                GLES20.glScissor(xpos, height / 4, BLOCK_WIDTH, height / 2);
+                GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+                GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+
+                // Publish the frame. If we overrun the consumer (which is likely), we will
+                // slow down due to back-pressure. If the consumer stops acquiring buffers,
+                // which will happen if the TextureView is paused, we will get stuck here
+                // until the SurfaceTexture is released.
+                //
+                // TODO: investigate whether this behavior is different in 4.3 vs. 4.4
+                eglSurface.swapBuffers();
+
+                // Advance state
+                clearColor += 0.015625f;
+                if (clearColor > 1.0f) {
+                    clearColor = 0.0f;
+                }
+                xpos += xdir;
+                if (xpos <= -BLOCK_WIDTH / 2 || xpos >= width - BLOCK_WIDTH / 2) {
+                    Log.d(TAG, "change direction");
+                    xdir = -xdir;
+                }
+            }
+        }
+
+        /**
+         * Tells the thread to stop running.
+         */
+        public void halt() {
+            synchronized (mLock) {
+                mDone = true;
+                mLock.notify();
+            }
+        }
+
+        @Override   // will be called on UI thread
+        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+            Log.d(TAG, "onSurfaceTextureAvailable(" + width + "x" + height + ")");
+            synchronized (mLock) {
+                mSurfaceTexture = surface;
+                mLock.notify();
+            }
+        }
+
+        @Override   // will be called on UI thread
+        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+            Log.d(TAG, "onSurfaceTextureSizeChanged(" + width + "x" + height + ")");
+            // TODO: ?
+        }
+
+        @Override   // will be called on UI thread
+        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+            Log.d(TAG, "onSurfaceTextureDestroyed");
+
+            // We set the SurfaceTexture reference to null to tell the Renderer thread that
+            // it needs to stop. The renderer might be in the middle of drawing, so we want
+            // to return false here so that the caller doesn't try to release the ST out
+            // from under us.
+            //
+            // In theory.
+            //
+            // In 4.4, the buffer queue was changed to be synchronous, which means we block
+            // in dequeueBuffer(). If the renderer has been running flat out and is currently
+            // sleeping in eglSwapBuffers(), it's going to be stuck there until somebody
+            // tears down the SurfaceTexture. So we need to tear it down here to ensure
+            // that the renderer thread will break. If we don't, the thread sticks there
+            // forever.
+            //
+            // The only down side to releasing it here is we'll get some complaints in logcat
+            // when eglSwapBuffers() fails.
+            synchronized (mLock) {
+                mSurfaceTexture = null;
+            }
+            if (sReleaseInCallback) {
+                Log.i(TAG, "Allowing TextureView to release SurfaceTexture");
+            }
+            return sReleaseInCallback;
+        }
+
+        @Override   // will be called on UI thread
+        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+            //Log.d(TAG, "onSurfaceTextureUpdated");
+        }
+    }
+}
diff --git a/src/com/android/grafika/WindowSurface.java b/src/com/android/grafika/WindowSurface.java
new file mode 100644
index 0000000..08f4f47
--- /dev/null
+++ b/src/com/android/grafika/WindowSurface.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+
+/**
+ * Recordable EGL window surface.
+ *
+ * It's good practice to explicitly release() the surface, preferably from a "finally" block.
+ * This object owns the Surface; releasing this object will release the Surface as well.
+ */
+public class WindowSurface extends EglSurfaceBase {
+ private Surface mSurface;
+
+ /**
+ * Associates an EGL surface with the native window surface. The Surface will be
+ * owned by WindowSurface, and released when release() is called.
+ */
+ public WindowSurface(EglCore eglBase, Surface surface) {
+ super(eglBase);
+ createWindowSurface(surface);
+ mSurface = surface;
+ }
+
+ /**
+ * Associates an EGL surface with the SurfaceTexture.
+ */
+ public WindowSurface(EglCore eglBase, SurfaceTexture surfaceTexture) {
+ super(eglBase);
+ createWindowSurface(surfaceTexture);
+ }
+
+ /**
+ * Releases any resources associated with the Surface and the EGL surface.
+ */
+ public void release() {
+ releaseEglSurface();
+ if (mSurface != null) {
+ mSurface.release();
+ mSurface = null;
+ }
+ }
+
+ /**
+ * Recreate the EGLSurface, using the new EglBase. The caller should have already
+ * freed the old EGLSurface with releaseEglSurface().
+ *
+ * This is useful when we want to update the EGLSurface associated with a Surface.
+ * For example, if we want to share with a different EGLContext, which can only
+ * be done by tearing down and recreating the context. (That's handled by the caller;
+ * this just creates a new EGLSurface for the Surface we were handed earlier.)
+ *
+ * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
+ * context somewhere, the create call will fail with complaints from the Surface
+ * about already being connected.
+ */
+ public void recreate(EglCore newEglBase) {
+ if (mSurface == null) {
+ throw new RuntimeException("not yet implemented for SurfaceTexture");
+ }
+ mEglBase = newEglBase; // switch to new context
+ createWindowSurface(mSurface); // create new surface
+ }
+}
diff --git a/src/com/android/grafika/WorkDialog.java b/src/com/android/grafika/WorkDialog.java
new file mode 100644
index 0000000..ea7bb53
--- /dev/null
+++ b/src/com/android/grafika/WorkDialog.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2013 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.grafika;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.util.Log;
+import android.view.InflateException;
+import android.view.View;
+
+/**
+ * Utility functions for work_dialog.
+ */
+public class WorkDialog {
+    private static final String TAG = MainActivity.TAG;
+
+    /** Static utility class; not instantiable. */
+    private WorkDialog() {}
+
+    /**
+     * Prepares an alert dialog builder, using the work_dialog view.
+     *
+     * The caller should finish populating the builder, then call AlertDialog.Builder#show().
+     *
+     * @param activity the Activity used to inflate the layout and resolve resources
+     * @param titleId string resource to use as the dialog title
+     */
+    public static AlertDialog.Builder create(Activity activity, int titleId) {
+        View dialogView;
+        try {
+            dialogView = activity.getLayoutInflater().inflate(R.layout.work_dialog, null);
+        } catch (InflateException ie) {
+            // Log for visibility, then let the caller deal with it.
+            Log.e(TAG, "Exception while inflating work dialog layout: " + ie.getMessage());
+            throw ie;
+        }
+
+        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+        builder.setTitle(activity.getString(titleId));
+        builder.setView(dialogView);
+        return builder;
+    }
+}
+ * 0 1 2 3
+ * 7 6 5 4
+ *
+ * We draw one of the eight rectangles and leave the rest set to the clear color.
+ */
+ private void generateFrame(int frameIndex) {
+ frameIndex %= 8;
+
+ int startX, startY;
+ if (frameIndex < 4) {
+ // (0,0) is bottom-left in GL
+ startX = frameIndex * (WIDTH / 4);
+ startY = HEIGHT / 2;
+ } else {
+ startX = (7 - frameIndex) * (WIDTH / 4);
+ startY = 0;
+ }
+
+ GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+ GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
+ GLES20.glScissor(startX, startY, WIDTH / 4, HEIGHT / 2);
+ GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+ }
+
+ /**
+ * Generates the presentation time for frame N, in nanoseconds.
+ *