Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fixes #24 #56

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 46 additions & 32 deletions src/com/android/grafika/ContinuousCaptureActivity.java
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,8 @@ public class ContinuousCaptureActivity extends Activity implements SurfaceHolder
private MainHandler mHandler;
private float mSecondsOfVideo;

private static SurfaceHolder sSurfaceHolder;

/**
* Custom message handler for main UI thread.
* <p>
Expand Down Expand Up @@ -181,6 +183,8 @@ protected void onResume() {
// Ideally, the frames from the camera are at the same resolution as the input to
// the video encoder so we don't have to scale.
openCamera(VIDEO_WIDTH, VIDEO_HEIGHT, DESIRED_PREVIEW_FPS);

if(sSurfaceHolder != null) setUp();
}

@Override
Expand Down Expand Up @@ -348,17 +352,47 @@ private void updateBufferStatus(long durationUsec) {

@Override // SurfaceHolder.Callback
public void surfaceCreated(SurfaceHolder holder) {
    // Stash the holder in the static field so onResume() / surfaceChanged() can
    // run setUp() once both the surface and the Activity are ready.
    Log.d(TAG, "surfaceCreated holder=" + holder + " (static=" + sSurfaceHolder + ")");
    if (sSurfaceHolder != null) {
        // A previous surface was never released through surfaceDestroyed();
        // IllegalStateException (a RuntimeException subclass, so existing
        // catch blocks still work) is the idiomatic type for a state violation.
        throw new IllegalStateException("sSurfaceHolder is already set");
    }

    sSurfaceHolder = holder;
}

@Override // SurfaceHolder.Callback
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // The surface now has its final geometry; perform the deferred setup and
    // refresh the on-screen controls.
    final String msg = "surfaceChanged fmt=" + format + " size=" + width + "x" + height +
            " holder=" + holder;
    Log.d(TAG, msg);
    setUp();
    updateControls();
}

@Override // SurfaceHolder.Callback
public void surfaceDestroyed(SurfaceHolder holder) {
    // Drop the static reference so the next surfaceCreated() call does not
    // trip the "already set" sanity check.
    final String msg = "surfaceDestroyed holder=" + holder;
    Log.d(TAG, msg);
    sSurfaceHolder = null;
}

@Override // SurfaceTexture.OnFrameAvailableListener; runs on arbitrary thread
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // Forward the notification to the main-thread handler rather than doing
    // any work here, since this callback may arrive on an arbitrary thread.
    mHandler.sendEmptyMessage(MainHandler.MSG_FRAME_AVAILABLE);
}

/**
* Set up everything that requires an EGL context.
* We had to wait until we had a surface because you can't make an EGL context current
* without one, and creating a temporary 1x1 pbuffer is a waste of time.
*
* The display surface that we use for the SurfaceView, and the encoder surface we
* use for video, use the same EGL context.
*/
private void setUp() {
assert sSurfaceHolder != null;

mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
mDisplaySurface = new WindowSurface(mEglCore, holder.getSurface(), false);
mDisplaySurface = new WindowSurface(mEglCore, sSurfaceHolder.getSurface(), false);
mDisplaySurface.makeCurrent();

mFullFrameBlit = new FullFrameRect(
Expand All @@ -374,36 +408,16 @@ public void surfaceCreated(SurfaceHolder holder) {
throw new RuntimeException(ioe);
}
mCamera.startPreview();

// TODO: adjust bit rate based on frame rate?
// TODO: adjust video width/height based on what we're getting from the camera preview?
// (can we guarantee that camera preview size is compatible with AVC video encoder?)
// (can we guarantee that camera preview size is compatible with AVC video encoder?)
try {
mCircEncoder = new CircularEncoder(VIDEO_WIDTH, VIDEO_HEIGHT, 6000000,
mCameraPreviewThousandFps / 1000, 7, mHandler);
- mCameraPreviewThousandFps / 1000, 7, mHandler);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
mEncoderSurface = new WindowSurface(mEglCore, mCircEncoder.getInputSurface(), true);

updateControls();
}

@Override // SurfaceHolder.Callback
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "surfaceChanged fmt=" + format + " size=" + width + "x" + height +
" holder=" + holder);
}

@Override // SurfaceHolder.Callback
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "surfaceDestroyed holder=" + holder);
}

@Override // SurfaceTexture.OnFrameAvailableListener; runs on arbitrary thread
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
//Log.d(TAG, "frame available");
mHandler.sendEmptyMessage(MainHandler.MSG_FRAME_AVAILABLE);
}

/**
Expand Down