Question

Hi I am asking this question with reference to the code at https://github.com/google/grafika .

I am trying to re-initialize my encoder during an orientation change in order to adjust the aspect ratio of the encoded frames.

UPDATE:

Here is my GLSurfaceView.Renderer class

/**
 * Renderer for the camera-preview GLSurfaceView.
 *
 * Draws camera frames arriving on {@code mSurfaceTexture} to the display and
 * forwards them to a shared {@link TextureMovieEncoder}.  All three Renderer
 * callbacks run on the GL render thread; other methods are invoked from the
 * UI thread via {@code queueEvent()} or direct calls.
 */
class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
        private static final String TAG = MainActivity.TAG;
        private static final boolean VERBOSE = false;
        private static final boolean ROSE_COLORED_GLASSES = false;   // experiment

        private MainActivity.CameraHandler mCameraHandler;
        private TextureMovieEncoder mVideoEncoder;
        private File mOutputFile;
        TextView tv;             // status view; currently unused in this class

        private TextureRender mTextureRender;
        private SurfaceTexture mSurfaceTexture;
        private boolean mRecordingEnabled;
        Context cntex;           // used for TextureRender and orientation queries


        /**
         * @param cameraHandler UI-thread handler that receives the new SurfaceTexture
         * @param movieEncoder  shared encoder (runs its own thread)
         * @param outputFile    destination handed to the encoder config
         * @param tx            status TextView (kept but not used here)
         * @param c             context for shaders and orientation lookups
         */
        public CameraSurfaceRenderer(MainActivity.CameraHandler cameraHandler, TextureMovieEncoder movieEncoder, File outputFile, TextView tx, Context c) {

            mOutputFile = outputFile;
            mVideoEncoder = movieEncoder;

            mRecordingEnabled = false;
            mCameraHandler = cameraHandler;
            tv = tx;
            cntex = c;
        }


        /**
         * Called (on the GL thread, via queueEvent) when the surface is about to
         * go away; releases the SurfaceTexture that feeds the preview.  After
         * this returns, {@code mSurfaceTexture} is null until re-initialized.
         */
        public void notifyPausing() {
            if (mSurfaceTexture != null) {
                Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
                mSurfaceTexture.release();
                mSurfaceTexture = null;
            }
        }


        /** Toggles the recording flag; called from the UI thread. */
        public void changeRecordingState(boolean isRecording) {
            Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
            mRecordingEnabled = isRecording;
        }

        @Override
        public void onSurfaceCreated(GL10 unused, EGLConfig config) {
            System.out.println("onSurfaceCreated start");
            Log.d(TAG, "onSurfaceCreated");

            // Fresh renderer + GL program for the new EGL context.
            mTextureRender = new TextureRender(cntex);
            mTextureRender.surfaceCreated();

            if (ROSE_COLORED_GLASSES) {
                // Experimental fragment shader: red channel becomes luminance.
                String rosyFragment =
                        "#extension GL_OES_EGL_image_external : require\n" +
                        "precision mediump float;\n" +
                        "varying vec2 vTextureCoord;\n" +
                        "uniform samplerExternalOES sTexture;\n" +
                        "void main() {\n" +
                        "    vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
                        "    gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
                        "}\n";
                // assign value to gl_FragColor.g and .b as well to get simple B&W

                mTextureRender.changeFragmentShader(rosyFragment);
            }

            // Texture the camera renders into.
            mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());

            // Tell the UI thread to enable the camera preview.
            mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
                    MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));

            // Encoder size depends on current orientation (portrait = 480x640).
            if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
                        mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
            } else {
                 mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
                            mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
            }

            // NOTE(review): grafika only calls updateSharedContext() when an
            // existing recording must survive a context recreation; calling it
            // immediately after startRecording() rebuilds the EGL setup that
            // was just created -- confirm this is intentional.
            mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
            System.out.println("onSurfaceCreated end");
        }
        boolean is_stpd = false;    // currently unused
        int count = 0;              // currently unused
        @Override
        public void onSurfaceChanged(GL10 unused, int width, int height) {
            System.out.println("onSurfaceChanged start");
            Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
            GLES20.glViewport(0, 0, width, height);
            mCameraHandler.sendEmptyMessage(2); // to display toast message of surface change.
            int orientation = cntex.getResources().getConfiguration().orientation;
            // Ask the encoder thread to tear down and rebuild at the new aspect.
            mVideoEncoder.encoder_reinitialisation(EGL14.eglGetCurrentContext(), orientation);
            System.out.println("onSurfaceChanged end");

        }
        int _frm_cnt = 0;       // frames drawn in the current fps window
        double _strt_tm = 0;    // wall-clock start of the current fps window
        @Override
        public void onDrawFrame(GL10 unused) {
            System.out.println("onDrawFrame start");

            // BUGFIX: a frame callback can arrive after notifyPausing() has
            // released the SurfaceTexture and before re-initialization creates
            // a new one; the old code crashed on updateTexImage() below.
            if (mSurfaceTexture == null) {
                Log.w(TAG, "onDrawFrame: no SurfaceTexture, skipping frame");
                return;
            }

            if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureRender.getTextureId());
            // Simple fps meter: the window restarts when _frm_cnt wraps to 1.
            ++_frm_cnt;
            if (_frm_cnt == 1) {
                _strt_tm = System.currentTimeMillis();
            }
            if ((System.currentTimeMillis() - _strt_tm) >= 1000) {
                System.out.println("fps = " + _frm_cnt);
                //tx.setText("fps = " + Integer.toString(frm_cnt));
                _frm_cnt = 0;
            }

            // Latch the latest camera frame into the external texture.
            mSurfaceTexture.updateTexImage();

            mVideoEncoder.setTextureId(mTextureRender.getTextureId());

            // Only feed the encoder when its codec/EGL state is fully built
            // (it is torn down across orientation changes).
            if (mVideoEncoder.is_prepared == true) {
                // This will be ignored if we're not actually recording.
                mVideoEncoder.frameAvailable(mSurfaceTexture);
            }

            // Draw the video frame.
            mTextureRender.drawFrame(mSurfaceTexture);

            System.out.println("onDrawFrame end");
        }

    }

Here is my TextureMovieEncoder class

public class TextureMovieEncoder implements Runnable {
    private static final String TAG = MainActivity.TAG;
    private static final boolean VERBOSE = false;

    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 5;               // 30fps
    private static final int IFRAME_INTERVAL = 1;           // 5 seconds between I-frames

    private static final int MSG_START_RECORDING = 0;
    private static final int MSG_STOP_RECORDING = 1;
    private static final int MSG_FRAME_AVAILABLE = 2;
    private static final int MSG_SET_TEXTURE_ID = 3;
    private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
    private static final int MSG_QUIT = 5;
    private static final int MSG_REINIT = 6;

    // ----- accessed by encoder thread -----
    private EglCore mEglBase;
    private WindowSurface mInputWindowSurface;
    private MediaMuxer mMuxer;
    private MediaCodec mEncoder;
    private MediaCodec.BufferInfo mBufferInfo;
    private int mTrackIndex;
    private boolean mMuxerStarted;
    private TextureRender mTextureRender;
    private int mTextureId;
    private int mFrameNum;
    File enc_file = new File(Environment.getExternalStorageDirectory().getPath() + "/encoded_preview.webm");
    FileOutputStream fp_enc = null;
    PrintWriter enc_len = null;
    // ----- accessed by multiple threads -----
    private volatile EncoderHandler mHandler;

    private Object mReadyFence = new Object();      // guards ready/running
    private boolean mReady;
    public boolean mRunning;
    public boolean is_prepared = false;






    public TextureMovieEncoder(Context cntxt) {
        context = cntxt;
    }


    public static class EncoderConfig {
        final File mOutputFile;
        final int mWidth;
        final int mHeight;
        final int mBitRate;
        final EGLContext mEglContext;

        public EncoderConfig(File outputFile, int width, int height, int bitRate,
                EGLContext sharedEglContext) {
            System.out.println("EncoderConfig start");
            mOutputFile = outputFile;
            mWidth = width;
            mHeight = height;
            mBitRate = bitRate;
            mEglContext = sharedEglContext;
            System.out.println("EncoderConfig end");
        }

        @Override
        public String toString() {
            return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
                    " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
        }
    }


    public void startRecording(EncoderConfig config) {
        System.out.println("startRecording start");
        Log.d(TAG, "Encoder: startRecording()");
        synchronized (mReadyFence) {
            if (mRunning) {
                Log.w(TAG, "Encoder thread already running");
                return;
            }
            mRunning = true;
            new Thread(this, "TextureMovieEncoder").start();
            while (!mReady) {
                try {
                    mReadyFence.wait();
                } catch (InterruptedException ie) {
                    // ignore
                }
            }
        }

        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
        System.out.println("startRecording end");
    }
    int orientation_local = -1;
    public void encoder_reinitialisation(EGLContext eglContext, int orientation) {
        System.out.println("encoder_reinitialisation start");
        is_prepared = false;
        System.out.println("encoder_reinitialisation before message oriebta = " + orientation);
        mHandler.sendMessage(mHandler.obtainMessage(MSG_REINIT, eglContext));
        System.out.println("encoder_reinitialisation after message");
        orientation_local = orientation;
        System.out.println("encoder_reinitialisation end");
    }


    public void stopRecording() {
        System.out.println("stopRecording start");
        if(mHandler != null) {
            mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
            mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
        }
        // We don't know when these will actually finish (or even start).  We don't want to
        // delay the UI thread though, so we return immediately.
        System.out.println("stopRecording end");
    }

    /**
     * Returns true if recording has been started.
     */
    public boolean isRecording() {
        synchronized (mReadyFence) {
            return mRunning;
        }
    }


    public void updateSharedContext(EGLContext sharedContext) {
        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
    }


    public void frameAvailable(SurfaceTexture st) {
        System.out.println("frameAvailable start");
        synchronized (mReadyFence) {

            if (!mReady) {
                return;
            }
        }

        float[] transform = new float[16];      // TODO - avoid alloc every frame
        st.getTransformMatrix(transform);
        long timestamp = st.getTimestamp();
        if (timestamp == 0) {

            Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
            return;
        }

        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (timestamp >> 32), (int) timestamp, transform));
        System.out.println("frameAvailable end");
    }


    public void setTextureId(int id) {
        System.out.println("setTextureId start");
        synchronized (mReadyFence) {
            if (!mReady) {
                return;
            }
        }
        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
        System.out.println("setTextureId end");
    }


    @Override
    public void run() {
        System.out.println("run start");
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mReadyFence) {
            mHandler = new EncoderHandler(this);
            mReady = true;
            mReadyFence.notify();
        }
        Looper.loop();

        Log.d(TAG, "Encoder thread exiting");
        synchronized (mReadyFence) {
            mReady = mRunning = false;
            mHandler = null;
            System.out.println("run end");
        }
    }

    /**
     * Handles encoder state change requests.
     */
    private static class EncoderHandler extends Handler {
        private WeakReference<TextureMovieEncoder> mWeakEncoder;

        public EncoderHandler(TextureMovieEncoder encoder) {
            mWeakEncoder = new WeakReference<TextureMovieEncoder>(encoder);
        }

        @Override
        public void handleMessage(Message inputMessage) {
            int what = inputMessage.what;
            Object obj = inputMessage.obj;

            TextureMovieEncoder encoder = mWeakEncoder.get();
            if (encoder == null) {
                Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
                return;
            }

            switch (what) {
                case MSG_START_RECORDING:
                    encoder.handleStartRecording((EncoderConfig) obj);
                    break;
                case MSG_REINIT:
                    encoder.encoder_reinit((EGLContext) inputMessage.obj);
                    break;
                case MSG_STOP_RECORDING:
                    encoder.handleStopRecording();
                    break;
                case MSG_FRAME_AVAILABLE:
                    long timestamp = (((long) inputMessage.arg1) << 32) |
                            (((long) inputMessage.arg2) & 0xffffffffL);
                    encoder.handleFrameAvailable((float[]) obj, timestamp);
                    break;
                case MSG_SET_TEXTURE_ID:
                    encoder.handleSetTexture(inputMessage.arg1);
                    break;
                case MSG_UPDATE_SHARED_CONTEXT:
                    encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
                    break;
                case MSG_QUIT:
                    Looper.myLooper().quit();
                    break;
                default:
                    throw new RuntimeException("Unhandled msg what=" + what);
            }
        }
    }

    /**
     * Start recording.
     */
    private void handleStartRecording(EncoderConfig config) {
        Log.d(TAG, "handleStartRecording " + config);
        mFrameNum = 0;
        prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
                config.mOutputFile);
    }

    private void encoder_reinit(EGLContext obj) {
        System.out.println("encoder_reinit start ");

        drainEncoder(true);
        releaseEncoder();

        prepareEncoder(obj, video_width, video_height, 1400000,
                null);
        System.out.println("encoder_reinit end ");
    }

    private void handleFrameAvailable(float[] transform, long timestamp) {
        System.out.println("handleFrameAvailable start");
        if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
        if(is_prepared == true) {
            drainEncoder(false);
            mTextureRender.setTextureId(mTextureId);
            mTextureRender.drawFrame(transform);

            mInputWindowSurface.setPresentationTime(timestamp);
            mInputWindowSurface.swapBuffers();
        }
        System.out.println("handleFrameAvailable end");
    }

    private void handleStopRecording() {
        Log.d(TAG, "handleStopRecording");
        drainEncoder(true);
        releaseEncoder();
    }

    private void handleSetTexture(int id) {
        //Log.d(TAG, "handleSetTexture " + id);
        mTextureId = id;
    }

    /**
     * Tears down the EGL surface and context we've been using to feed the MediaCodec input
     * surface, and replaces it with a new one that shares with the new context.
     */
    private void handleUpdateSharedContext(EGLContext newSharedContext) {
        System.out.println("handleUpdateSharedContext start");
        Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);

        // Release the EGLSurface and EGLContext.
        if(mInputWindowSurface != null) {
        mInputWindowSurface.releaseEglSurface();
        mEglBase.release();
        }

        // Create a new EGLContext and recreate the window surface.
        mEglBase = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
        mInputWindowSurface.recreate(mEglBase);
        mInputWindowSurface.makeCurrent();

        // Create new programs and such for the new context.
        mTextureRender.surfaceCreated();
        System.out.println("handleUpdateSharedContext end");
    }
    boolean created =false;
    EGLContext sharedContext_local;
    private Context context;


    private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
            File outputFile) {
        System.out.println("prepareEncoder start width = " + width + "height = " + height);
        sharedContext_local = sharedContext;
        enc_strm = new byte[width * height * 3 / 2];
        encoded_data_buffer = new ofi_vc_buffer();
        video_width = width;
        video_height = height;
        if(!created) {
            try {
                fp_enc = new FileOutputStream(enc_file);
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }

            try {
                enc_len  = new PrintWriter(Environment.getExternalStorageDirectory().getPath() + "/encoded_len.xls");

            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
            created = true;
        }
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = null;
        if(orientation_local == 1) {
            System.out.println("videoformatting portrait");
            format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        } else {
            System.out.println("videoformatting landscape");
            format = MediaFormat.createVideoFormat(MIME_TYPE, height, width);
        }
        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);


        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEglBase = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
        mInputWindowSurface = new WindowSurface(mEglBase, mEncoder.createInputSurface());
        mEncoder.start();

        mInputWindowSurface.makeCurrent();
        mTextureRender = new TextureRender(context);
        mTextureRender.surfaceCreated();



        mTrackIndex = -1;
        mMuxerStarted = false;
        is_prepared = true;
        System.out.println("prepareEncoder end");
    }

    /**
     * Releases encoder resources.
     */
    private void releaseEncoder() {
         System.out.println("releaseEncoder end");
        if (VERBOSE) Log.d(TAG, "releasing encoder objects");
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mInputWindowSurface != null) {
            mInputWindowSurface.release();
            mInputWindowSurface = null;
        }
        if (mEglBase != null) {
            mEglBase.release();
            mEglBase = null;
        }
        if (mMuxer != null) {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
        System.out.println("releaseEncoder start");
    }
    byte[] enc_strm = null;
    byte sps_pps_nal[] = null;
    int sps_pps_nal_size = 0;
    ofi_vc_buffer encoded_data_buffer = null;
    private int encod_len = 0;
    private int frame_type;
    private encoded_stream_info enc_buffer_global;
    private int video_width;
    private int video_height;

    private void drainEncoder(boolean endOfStream) {
        System.out.println("drainEncoder start");
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }
        //encoded_stream_info enc_buffer = new encoded_stream_info(video_width * video_height * 3 / 2);
        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;      // out of while
                } else {
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
//                mTrackIndex = mMuxer.addTrack(newFormat);
//                mMuxer.start();
//                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                    if (VERBOSE) Log.d("Main", "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    System.out.println("Encode SPS PPS buffer size" + mBufferInfo.size );

                    sps_pps_nal_size = mBufferInfo.size;
                    sps_pps_nal = new byte[sps_pps_nal_size];
                    encodedData.get(sps_pps_nal, 0, sps_pps_nal_size);
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
//                    if (!mMuxerStarted) {
//                        throw new RuntimeException("muxer hasn't started");
//                    }

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    encodedData.get(enc_strm, sps_pps_nal_size, mBufferInfo.size);

                    System.arraycopy(sps_pps_nal, 0, enc_strm, 0, sps_pps_nal_size);   

                    encod_len  = mBufferInfo.size + sps_pps_nal_size;
                    if ((enc_strm[sps_pps_nal_size + 4] & 0x1F)== 5) {

                           frame_type = 2;
                       } else {

                           frame_type = 0;
                       }
                    //mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) {
                        Log.d("Main", "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                                mBufferInfo.presentationTimeUs);
                    }
                    enc_buffer_global.encoded_len = 0;  // frame sending is disabled.
                    if(enc_buffer_global.encoded_len == 0) {

                        enc_buffer_global.encoded_data = enc_strm;
                        enc_buffer_global.encoded_len  = encod_len;
                        enc_buffer_global.frame_type   = frame_type;
                       // System.out.println("encoded Wrote stream len =" + enc_buffer_global.encoded_len);

                    try {
                        fp_enc.write(enc_strm, 0, encod_len);
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    try {
                        fp_enc.flush();
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }

                    enc_len.format("%d\n", encod_len);
                    enc_len.flush();
                    }
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break;      // out of while
                }

            }
        }
        System.out.println("drainEncoder end");
    }

    public void set_buffer(encoded_stream_info bufer) {
        enc_buffer_global = bufer;      
    }
}

Other than these I have added

EglCore.java

EglSurfaceBase.java

TextureRender.java

WindowSurface.java

classes from the grafika link given above.

Still, I am not able to figure out why my app freezes during re-initialization. Sometimes it works, though.

Can anyone help?

Otherwise, what steps should I take to change the encoding resolution?

Thanks in advance...........

Was it helpful?

Solution

I got a working one with the following code....

/**
 * Handles device rotation without recreating the activity.  Queues two
 * events on the GL thread, which run in order: first release the renderer's
 * current SurfaceTexture/encoder state, then rebuild it for the new
 * orientation via re_init().
 */
public void onConfigurationChanged(Configuration newConfig) {
    System.out.println("On config change start ");
    super.onConfigurationChanged(newConfig);

    // Tell the renderer that it's about to be paused so it can clean up.
    Runnable pauseTask = new Runnable() {
        @Override
        public void run() {
            mRenderer.notifyPausing();
        }
    };

    // Rebuild renderer + encoder state for the new orientation.
    Runnable reinitTask = new Runnable() {
        @Override
        public void run() {
            mRenderer.re_init();
        }
    };

    // queueEvent() preserves ordering: pause always completes before re-init.
    mGLView.queueEvent(pauseTask);
    mGLView.queueEvent(reinitTask);
}

where the re_init function is:

/**
 * Rebuilds the renderer state on the GL thread after an orientation change:
 * creates a fresh TextureRender and SurfaceTexture, hands the texture to the
 * UI thread so the camera preview can reconnect, then restarts the encoder
 * with an orientation-dependent frame size.  Must run after notifyPausing()
 * has released the previous SurfaceTexture (and stopped the encoder).
 */
public void re_init() {
            // New GL program for the (possibly new) context.
            mTextureRender = new TextureRender(cntex);
            mTextureRender.surfaceCreated();

            if (ROSE_COLORED_GLASSES) {
                // Experimental fragment shader: red channel becomes luminance.
                String rosyFragment =
                        "#extension GL_OES_EGL_image_external : require\n" +
                        "precision mediump float;\n" +
                        "varying vec2 vTextureCoord;\n" +
                        "uniform samplerExternalOES sTexture;\n" +
                        "void main() {\n" +
                        "    vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
                        "    gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
                        "}\n";

                mTextureRender.changeFragmentShader(rosyFragment);
            }

            // Fresh texture for the camera to render into.
            mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());

            // Ask the UI thread to reconnect the camera preview to it.
            mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
                    MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));

            // NOTE(review): these width/height pairs are swapped relative to
            // onSurfaceCreated (portrait there uses 480x640) -- confirm which
            // mapping is intended before reusing this code.
            if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
                        mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
            } else {

                 mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
                            mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
            }

            //mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());

        }

and in mRenderer.notifyPausing() I have also added a videoEncoder.stopRecording() call, which waits until the whole recorder has stopped (I used object-based synchronization there).

But the whole re-initialisation takes 250 - 400 ms................

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top