
Recording Two Cameras Simultaneously with OpenGL (Dynamic Watermark), with Audio Tracks in Both Files (雪飘碧鸳's blog)




This article shows how to preview and record two camera streams simultaneously on Android using OpenGL, with an audio track in both recording files, saving the video files (with sound) as well as a standalone audio file. A timestamp watermark is drawn on both the preview and the recording.

First, a screenshot of the demo in action:

1. First, initialize the two camera previews:


import android.content.Context;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import com.reach.mutilcamerarecord.MainActivity;
import com.reach.mutilcamerarecord.media.base.RecordManageBase;
import com.reach.mutilcamerarecord.media.encode.IAudioListener;
import com.reach.mutilcamerarecord.media.manager.RecordVideoAndAudioManager;
import com.reach.mutilcamerarecord.media.setting.CameraSetting;
import com.reach.mutilcamerarecord.media.setting.RecordSetting;
import com.reach.mutilcamerarecord.media.setting.RenderSetting;

import java.io.File;
import java.io.IOException;

/**
 * Local built-in camera (the first video path)
 */
public class GLRecordManager {
    private MainActivity mActivity;
    private Context mContext;
    //view
    private SurfaceView surfaceView;
    private SurfaceHolder holder;
    private RecordVideoAndAudioManager recorder;

    private final static int TargetLongWidth = 1280;    // recording resolution, long edge
    private final static int TargetShortWidth = 720;    // recording resolution, short edge

    private int cameraWidth = 1280;     // raw camera frame width
    private int cameraHeight = 720;     // raw camera frame height


    public GLRecordManager(MainActivity activity, Context context, SurfaceView sv){
        mActivity = activity;
        mContext = context;
        surfaceView = sv;
        RecordSetting recordSetting = new RecordSetting();
        CameraSetting cameraSetting = new CameraSetting();
        cameraSetting.fps = 15;
        cameraSetting.cameraW = cameraWidth;
        cameraSetting.cameraH = cameraHeight;
        cameraSetting.cameraPosition = 0;
        RenderSetting renderSetting = new RenderSetting();

        recorder = new RecordVideoAndAudioManager(mActivity, null, recordSetting, cameraSetting, renderSetting, surfaceView);
        recorder.setCallBackEvent(new RecordManageBase.CallBackEvent() {
            @Override
            public void startRecordSuccess() {
                Log.e("main", "startRecordSuccess");
            }

            @Override
            public void onDuringUpdate(float time) {
                Log.e("main", "onDuringUpdate => time = " + time);
            }

            @Override
            public void stopRecordFinish(File file) {
                Log.e("main", "stopRecordFinish => path = " + file.getPath());
            }

            @Override
            public void recordError(String errorMsg) {
                Log.e("main", "recordError => msg = " + errorMsg);
            }

            @Override
            public void openCameraSuccess(int cameraPosition) {

                recorder.getRecordSetting().setVideoSetting(TargetLongWidth,TargetShortWidth,
                        recorder.getCameraManager().getRealFps() / 1000, RecordSetting.ColorFormatDefault);
                recorder.getRecordSetting().setVideoBitRate(3000 * 1024);
                recorder.switchOnBeauty(cameraPosition == 1);
                Log.e("main", "openCameraSuccess => cameraPosition = " + cameraPosition);
            }

            @Override
            public void openCameraFailure(int cameraPosition) {
                Log.e("main", "openCameraFailure => cameraPosition = " + cameraPosition);
            }

            @Override
            public void onVideoSizeChange(int width, int height) {
                Log.e("main", "onVideoSizeChange => width = " + width + ", height = " + height);
            }

            @Override
            public void onPhotoSizeChange(int width, int height) {
                Log.e("main", "onPhotoSizeChange => width = " + width + ", height = " + height);
            }
        });

        holder = surfaceView.getHolder();
        holder.addCallback(new CustomCallBack());
    }

    public void onDestroy() {
        if (recorder != null)
            recorder.destroy();
    }

    public void recordVideo(){
        if (recorder.isRecord){
            stopRecord();
        } else {
            startRecord();
        }
    }

    /**
     * Create the output files before starting a recording
     */
    public void setVideoFileName() {
        String path = "/sdcard/test_A.mp4";     // video from the built-in camera
        String pathMp3 = "/sdcard/test.mp3";    // standalone audio file
        File file = new File(path);
        File mp3 = new File(pathMp3);
        try {
            file.createNewFile();
            mp3.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
        recorder.setFileName(file, mp3);
    }

    public void startRecord(){
        recorder.startRecord();
    }

    public void stopRecord(){
        recorder.stopRecord();
    }

    public IAudioListener getGLAudioListener(){
        return recorder.getGLAudioListener();
    }

    private class CustomCallBack implements SurfaceHolder.Callback {

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            recorder.init();
            //recorder.getCameraManager().getEvent().openCameraFailure(0);
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            recorder.getRenderSetting().setRenderSize(width < height ? TargetShortWidth : TargetLongWidth, width < height ? TargetLongWidth : TargetShortWidth);
            recorder.getRenderSetting().setDisplaySize(width, height);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            recorder.destroy();
        }

    }
}
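
For context, here is a minimal sketch of how this manager might be wired up from MainActivity (the layout id R.id.camera_local is an assumption; the demo's actual layout is not shown in this post):

// Sketch: create the built-in-camera manager in MainActivity.onCreate().
// R.id.camera_local is a placeholder id; use whatever the layout defines.
SurfaceView localPreview = findViewById(R.id.camera_local);
GLRecordManager mGLRecord = new GLRecordManager(this, this, localPreview);
// A record button handler would later call:
//   mGLRecord.setVideoFileName();
//   mGLRecord.recordVideo();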

Preview code for the second camera (a USB camera):

import android.content.Context;
import android.content.res.Configuration;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Environment;
import android.util.Log;
import android.view.TextureView;
import android.view.View;

import androidx.annotation.NonNull;

import com.reach.mutilcamerarecord.MainActivity;
import com.reach.mutilcamerarecord.R;
import com.reach.mutilcamerarecord.mediausb.client.RecorderClient;
import com.reach.mutilcamerarecord.mediausb.code.listener.IVideoChange;
import com.reach.mutilcamerarecord.mediausb.code.listener.UvcPcmDataCallBack;
import com.reach.mutilcamerarecord.mediausb.filter.filter.DrawMultiImageFilter;
import com.reach.mutilcamerarecord.mediausb.model.MediaConfig;
import com.reach.mutilcamerarecord.mediausb.model.RecordConfig;
import com.reach.mutilcamerarecord.mediausb.model.Size;
import com.reach.mutilcamerarecord.mediausb.view.AspectTextureView;

/**
 * Author:lzh on 2021/6/10 10:04
 */
public class GLUsbManager {
    private MainActivity mActivity;
    private Context mContext;

    protected RecorderClient mRecorderClient;
    protected AspectTextureView mTextureView;
    RecordConfig recordConfig;

    boolean isStart = false;

    public GLUsbManager(MainActivity activity, Context context){
        mActivity = activity;
        mContext = context;

        initView();
    }

    private void initView(){

        mTextureView = mActivity.findViewById(R.id.camera_usb);
        mTextureView.setVisibility(View.VISIBLE);
        mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
                if (mRecorderClient != null) {
                    mRecorderClient.startPreview(surface, width, height);
                }
                updateOutsideCamState(1);
                Log.i("textureAvailable", "usb camera start to preview...");
            }

            @Override
            public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
                if (mRecorderClient != null) {
                    mRecorderClient.updatePreview(width, height);
                }
            }

            @Override
            public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
                if (mRecorderClient != null) {
                    mRecorderClient.stopPreview(true);
                }
                updateOutsideCamState(0);
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {

            }
        });
        prepareStreamingClient();
        onSetFilters();
    }

    private void prepareStreamingClient() {
        try {
            mRecorderClient = new RecorderClient(this);

            recordConfig = RecordConfig.obtain();
            recordConfig.setTargetVideoSize(new Size(1280, 720));
            int bitRate = 1280 * 720;
            recordConfig.setSquare(true);   // square, no rounded corners
            recordConfig.setBitRate(bitRate);
            recordConfig.setVideoFPS(15);
            recordConfig.setVideoGOP(1);
            recordConfig.setRenderingMode(MediaConfig.Rending_Model_OpenGLES);
            //camera
            recordConfig.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_FRONT);
            int frontDirection, backDirection;
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, cameraInfo);
            frontDirection = cameraInfo.orientation;
            Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, cameraInfo);
            backDirection = cameraInfo.orientation;
            if (mContext.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
                recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270));
            } else {
                recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180));
                recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
            }
            //save video
            String mSaveVideoPath = Environment.getExternalStorageDirectory().getPath() + "/usb_video" + System.currentTimeMillis() + ".mp4";
            recordConfig.setSaveVideoPath(mSaveVideoPath);

            if (!mRecorderClient.prepare(mContext, recordConfig)) {
                mRecorderClient = null;
                Log.e("RecordingActivity", "prepare,failed!!");
                updateOutsideCamState(0);
                return;
            }
            
            Size s = mRecorderClient.getVideoSize();
            mTextureView.setAspectRatio(AspectTextureView.MODE_FITXY, ((double) s.getWidth()) / s.getHeight());

            mRecorderClient.setVideoChangeListener(new IVideoChange() {
                @Override
                public void onVideoSizeChanged(int width, int height) {
                    mTextureView.setAspectRatio(AspectTextureView.MODE_FITXY, ((double) width) / height);
                }
            });
            
        }catch (Exception e){
            e.printStackTrace();
        }
    }

    protected void onSetFilters() {
        mRecorderClient.setHardVideoFilter(new DrawMultiImageFilter(mContext));
    }

    public void recordVideo(){
        if (!isStart) {
            updateOutsideCamState(1);
            String path = "/sdcard/test_B.mp4";
            mRecorderClient.updatePath(path);
            mRecorderClient.startRecording();
        } else{
            mRecorderClient.stopRecording();
        }
        isStart = !isStart;
    }

    public void setUsbAudioCallback(UvcPcmDataCallBack l){
        mActivity.setUsbAudioCallback(l);
    }

    public void updateOutsideCamState(int status){
        // Hook for reporting the USB camera state to the Activity; disabled in this demo.
        try {
           // mActivity.updateOutsideCamState(status);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }


}
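
GLUsbManager looks up its own AspectTextureView (R.id.camera_usb) in initView(), so constructing it is a one-liner once the layout has been inflated. A minimal sketch:

// Sketch: create the USB-camera manager after setContentView(),
// so that findViewById(R.id.camera_usb) inside initView() succeeds.
GLUsbManager glUsbManager = new GLUsbManager(this, this);
// Recording is toggled later with glUsbManager.recordVideo().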

Next, the recording operation:

/**
     * Start/stop recording button handler
     */
    private void recordFunc(){
        if (isRecording) {
            mRecordBtn.setText("Start recording");
        } else {
            mRecordBtn.setText("Stop recording");
        }
        recordVideo();
        isRecording = !isRecording;
    }

    private void recordVideo(){
        mAudioManager.record();
        if (glUsbManager != null) {
            glUsbManager.recordVideo();
        } else {
            Log.e("usbCamera", "start usb camera record fail,gl usb manager is null...");
        }
        mGLRecord.setVideoFileName();
        mGLRecord.recordVideo();

        getGLAudioListener();
    }
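
One practical note: on Android 6.0 and above, the camera, microphone, and storage permissions must be granted at runtime before any of the calls above can succeed. A minimal check might look like the following sketch (AndroidX is assumed; the request code 100 is arbitrary):

// Sketch: request the runtime permissions recording depends on.
String[] perms = {
        Manifest.permission.CAMERA,
        Manifest.permission.RECORD_AUDIO,
        Manifest.permission.WRITE_EXTERNAL_STORAGE
};
for (String p : perms) {
    if (ContextCompat.checkSelfPermission(this, p)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this, perms, 100);
        break;
    }
}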

Also, because the microphone is an exclusive resource on Android, the captured PCM audio stream has to be fanned out into two copies, one for each camera's recording:

/**
     * Forward each raw PCM frame to every consumer that shares it
     * @param buffer    the PCM data
     * @param readBytes number of valid bytes in the buffer
     */
    public void onFramePcm(ByteBuffer buffer, int readBytes) {
        if (uvcPcmDataCallBack != null){
            uvcPcmDataCallBack.onFramePcm(buffer, readBytes);   // USB (UVC) camera path
            buffer.flip();  // rewind so the next consumer reads the same bytes
        }
        if (glAudioCallback != null) {
            glAudioCallback.onPcm(buffer, readBytes);           // built-in camera path
            buffer.flip();
        }
    }
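
The buffer.flip() calls rewind the buffer so that the second consumer reads the same bytes from the start; this relies on each callback consuming the buffer up to its limit. If that cannot be guaranteed, a variant that hands each consumer an independent view is sketched below (ByteBuffer.duplicate() shares the underlying bytes but keeps separate position/limit state):

// Variant sketch: each consumer gets its own view of the PCM frame,
// so neither callback can disturb the other's read position.
public void onFramePcm(ByteBuffer buffer, int readBytes) {
    if (uvcPcmDataCallBack != null) {
        uvcPcmDataCallBack.onFramePcm(buffer.duplicate(), readBytes);
    }
    if (glAudioCallback != null) {
        glAudioCallback.onPcm(buffer.duplicate(), readBytes);
    }
}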

Finally, the key part of the timestamp watermark code:

import android.content.Context;
import android.graphics.Rect;
import android.graphics.RectF;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;

import com.reach.mutilcamerarecord.mediausb.filter.hardvideofilter.BaseHardVideoFilter;
import com.reach.mutilcamerarecord.mediausb.tools.GLESTools;
import com.reach.mutilcamerarecord.utils.BitmapUtils;

import java.nio.FloatBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;

/**
 * Author:lzh on 2021/7/14 17:34
 */
public class DrawMultiImageFilter extends BaseHardVideoFilter {
    protected int glProgram;
    protected int glCamTextureLoc;
    protected int glCamPostionLoc;
    protected int glCamTextureCoordLoc;
    protected int glImageTextureLoc;
    protected int glImageRectLoc;
    protected int glImageAngelLoc;

    protected Context mContext;
    private ArrayList<ImageTexture> imageTextures = new ArrayList<>();
    private int mSize = 12;

    int textureId;
    int frameBuffer;
    Rect rect;
    int x;
    int y;
    String str;

    private final static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);

    public DrawMultiImageFilter(Context context) {
        super();
        mContext = context;
    }

    @Override
    public void onInit(int videoWidth, int videoHeight) {
        super.onInit(videoWidth, videoHeight);
        glProgram = GLESTools.createProgram(GLESTools.uRes(mContext.getResources(), "drawimage_vertex.sh"),
                GLESTools.uRes(mContext.getResources(), "drawimage_fragment.sh"));
        GLES20.glUseProgram(glProgram);
        glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
        glImageTextureLoc = GLES20.glGetUniformLocation(glProgram, "uImageTexture");
        glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
        glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
        glImageRectLoc = GLES20.glGetUniformLocation(glProgram, "imageRect");
        glImageAngelLoc = GLES20.glGetUniformLocation(glProgram, "imageAngel");

        x = 20;
        y = videoHeight - 50;   // place the timestamp watermark at the bottom-left corner
        Log.e("water", "usb water title, y pos:" + y + ", h=" + videoHeight);
        initImageTexture();
    }

    protected void initImageTexture() {
        imageTextures = new ArrayList<>();
        ImageTexture imageTexture;
        // One texture per character of "yyyy-MM-dd HH:mm:ss" (19 characters):
        // slots 0-9 hold the digit glyphs, slot 10 holds "-", slot 11 holds ":",
        // and the rest are fillers whose frame buffers chain the draw passes.
        for (int i = 0; i < 19; i++) {
            imageTexture = new ImageTexture(outVideoWidth, outVideoHeight);
            if (i == 10) {
                imageTexture.loadBitmap(BitmapUtils.textToBitmap("-"));
            } else if (i == 11) {
                imageTexture.loadBitmap(BitmapUtils.textToBitmap(":"));
            } else if (i >=0 && i <= 9){
                imageTexture.loadBitmap(BitmapUtils.textToBitmap(i + ""));
            } else {
                imageTexture.loadBitmap(BitmapUtils.textToBitmap("*"));
            }
            imageTextures.add(imageTexture);
        }
        mSize = imageTextures.size();
    }

    @Override
    public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
        GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight);
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        ImageTexture preImageTexture = null;
        String time = formatter.format(new Date());

        if ("".equals(time)) {
            return;
        }
        for (int i = 0; i < mSize; i++) {
            if (preImageTexture == null) {
                textureId = cameraTexture;
            } else {
                textureId = preImageTexture.getTextureId();
            }
            if (i == mSize - 1) {
                frameBuffer = targetFrameBuffer;
            } else {
                frameBuffer = imageTextures.get(i).getFrameBuffer();
            }
            rect = new Rect(x+15*i, y, x+15*i+220, y+40);
            if (rect.left == rect.right || rect.top == rect.bottom) {
                continue;
            }
            str = time.substring(i, i+1);
            if (str.equals("-")){
                str = "10";
            } else if (str.equals(":")){
                str = "11";
            } else if (str.equals(" ") || str.equals("*")){
                continue;
            }
            drawImage(convertToRectF(rect), imageTextures.get(Integer.parseInt(str)).getImageTextureId(), textureId, frameBuffer, shapeBuffer, textureBuffer);
            preImageTexture = imageTextures.get(i);
        }
        GLES20.glFinish();
    }

    protected void drawImage(RectF rectF, int imageTextureId, int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
        GLES20.glEnableVertexAttribArray(glCamPostionLoc);
        GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
        shapeBuffer.position(0);
        GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
                GLES20.GL_FLOAT, false,
                2 * 4, shapeBuffer);
        textureBuffer.position(0);
        GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
                GLES20.GL_FLOAT, false,
                2 * 4, textureBuffer);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
        GLES20.glUseProgram(glProgram);
        GLES20.glUniform4f(glImageRectLoc, rectF.left, rectF.top, rectF.right, rectF.bottom);
//        GLES20.glUniform1f(glImageAngelLoc, (float)(30.0f*Math.PI/180));   // would set the watermark rotation angle
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
        GLES20.glUniform1i(glCamTextureLoc, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);
        GLES20.glUniform1i(glImageTextureLoc, 1);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
        GLES20.glDisableVertexAttribArray(glCamPostionLoc);
        GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glUseProgram(0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        GLES20.glDeleteProgram(glProgram);
        destroyImageTexture();
    }

    protected void destroyImageTexture() {
        for (ImageTexture imageTexture : imageTextures) {
            imageTexture.destroy();
        }
    }

    private RectF convertToRectF(Rect iconRect) {
        RectF iconRectF = new RectF();
        iconRectF.top = iconRect.top / (float) outVideoHeight;
        iconRectF.bottom = iconRect.bottom / (float) outVideoHeight;
        iconRectF.left = iconRect.left / (float) outVideoWidth;
        iconRectF.right = iconRect.right / (float) outVideoWidth;
        return iconRectF;
    }

    public static class ImageDrawData {
        public int resId = 0;
        public Rect rect;
    }
}
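
The filter above relies on BitmapUtils.textToBitmap(), whose source is not included in this post. A minimal sketch of what such a helper could look like (the white color and 32 px text size are arbitrary assumptions):

// Hypothetical sketch of BitmapUtils.textToBitmap(): renders a short
// string onto a transparent ARGB bitmap sized to the text bounds.
public static Bitmap textToBitmap(String text) {
    Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
    paint.setColor(Color.WHITE);
    paint.setTextSize(32f);
    Rect bounds = new Rect();
    paint.getTextBounds(text, 0, text.length(), bounds);
    Bitmap bitmap = Bitmap.createBitmap(Math.max(bounds.width(), 1),
            Math.max(bounds.height(), 1), Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bitmap);
    canvas.drawText(text, -bounds.left, -bounds.top, paint);
    return bitmap;
}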

Finally, the demo download links are attached below for study. If you have any questions, feel free to leave a comment under the article.

CSDN download: https://download.csdn.net/download/toyauko/21861215

GitHub download: https://github.com/Liangzhuhua/MutilCameraRecordDemo

