android 通过相机进行视频采集存储为文件,但采集视频内容播放是不正常的,怎么解决?

osc_73936194 发布于 09/28 16:23
阅读 53
收藏 0

MySQL连接为什么挂死了?别踩坑!>>>

我弄视频采集,虽然成功了,但拍出的视频不正常,不正常因素预览时,自动对焦无效果,拍摄视频采集时,内容不清晰,由于自己没有编写播放器代码,所以用第三方播放器测试采集的视频效果,

 

采集视频的文件在一些第三方播放器软件上播放失败,无法播放,在其他的一些播放器能播放,但是其他播放器播放的效果却是进度条不前进,播放完成时其他播放器闪退,不知原因。请大佬指点问题出在哪里。

 

贴出代码,以下是代码:

Activity:

xml界面代码:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
        xmlns:android="http://schemas.android.com/apk/res/android"
        xmlns:tools="http://schemas.android.com/tools"
        xmlns:app="http://schemas.android.com/apk/res-auto"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:orientation="vertical"
        tools:context=".VideoTestActivity">

    <FrameLayout android:layout_width="match_parent"
                 android:layout_height="match_parent">

        <SurfaceView
                android:id="@+id/sub_v"
                android:layout_width="match_parent"

                android:layout_height="match_parent">


        </SurfaceView>
       <LinearLayout android:layout_width="match_parent"  android:layout_height="wrap_content"
       android:orientation="vertical"
                     android:layout_margin="10dp"

                     android:layout_gravity="bottom"
       >

           <Button

                   android:text="录制"
                   android:layout_width="match_parent"
                   android:onClick="paly_click"
                   android:layout_height="wrap_content" android:id="@+id/button"/>
           <Button
                   android:text="结束"
                   android:onClick="stop_click"
                   android:layout_width="match_parent"
                   android:layout_height="wrap_content" android:id="@+id/button4"/>


       </LinearLayout>


    </FrameLayout>

</LinearLayout>

Activity代码:

package com.example.videoaudiotest;

import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.*;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ExifInterface;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.os.Environment;
import android.text.format.DateFormat;
import android.util.Size;
import android.view.*;
import android.widget.ImageView;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import commom.BaseActivity;
import commom.Logger;
import commom.vedior.H264Encoder;


import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ExecutorService;

public class VideoTestActivity extends BaseActivity {


    private CameraTools ct;

        @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_test);

            ct=new CameraTools().initCamera(this,R.id.sub_v);



    }



    //endregion

    //region 事件



    public void paly_click(View view) {


        ct.startCapture();
    }

    public void stop_click(View view) {

        ct.stopCapture();

    }

    


    //region 获取测试资源的路径




    public String getTestDirectoryPath(String suffix){

        return    Environment.getExternalStorageDirectory().getPath()+"/aaatest/"+getTestFileName(suffix);
    }

    public String getTestFileName(String suffix){

        String a=(DateFormat.format("yyyyMMdd_HHmmss", Calendar.getInstance(Locale.CHINA)) + "."+suffix);

        return    a;
    }

    public String getPathWithoutSuffix(String path){
        return path.substring(0, path.lastIndexOf("."));
    }



    //endregion

    //region 相机常用方法

    public class CameraTools{

        SurfaceView mSurfaceView;

        private SurfaceHolder mHolder;
        private Camera mCamera;
        private Camera.Parameters mParameters;

        private  H264Encoder encoder;
        private int width = 1280;
        private int height = 720;
        private int framerate = 30;

        //#region 初始化

        public CameraTools initCamera(Context content,int control_id){
            if(checkCameraHardware(content)==false){
                return null;
            }
            mSurfaceView=findViewById(control_id);
            mHolder = mSurfaceView.getHolder();
            mHolder.addCallback(surfaceHolderCallbackVideo);
            mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);



            return this;


        }



        private SurfaceHolder.Callback surfaceHolderCallbackVideo= new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder surfaceHolder) {

                mCamera = Camera.open();
                mCamera.setDisplayOrientation(90);
                Camera.Parameters parameters = mCamera.getParameters();
                parameters.setPreviewFormat(ImageFormat.NV21);
                parameters.setPreviewSize(1280, 720);
                try {
                    mCamera.setParameters(parameters);
                    mCamera.setPreviewDisplay(surfaceHolder);

                    mCamera.setPreviewCallback(captureVideo);
                    mCamera.startPreview();
                } catch (IOException e) {
                    e.printStackTrace();
                }



            }
            @Override
            public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {

            }



            @Override
            public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
                if (mCamera != null) {

                    mCamera.stopPreview();
                    mCamera.release();
                    mCamera = null;
                }

                if (encoder != null) {
                    encoder.stopEncoder();
                }
            }
        };





        private boolean checkCameraHardware(Context context) {
            if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
                Logger.d("#default","此手机拥有相机");
                return true;
            } else {

                return false;
            }
        }

        //endregion

      
        //region 流相关

        private boolean isCapture=false;

        public void startCapture(){

            isCapture=true;


            encoder = new H264Encoder(width, height, framerate);
            String path=getTestDirectoryPath("mp4");
            encoder.startEncoder(path);
            Logger.d("#default","采集开始");
        }

        public void stopCapture(){
            isCapture=false;
            encoder.stopEncoder();
            encoder=null;
            Logger.d("#default","采集结束");
        }


       private Camera.PreviewCallback captureVideo= new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] bytes, Camera camera) {

                if (encoder != null) {
                    encoder.putData(bytes);
                }


            }
        };



        //endregion


    }
    //endregion



}

H264Encoder:

package commom.vedior;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
import commom.Logger;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;

/**
 * H264 编码类
 */
public class H264Encoder {

    private final static int TIMEOUT_USEC = 12000;

    private MediaCodec mediaCodec;

    public boolean isRuning = false;
    private int width, height, framerate;
    public byte[] configbyte;

    private BufferedOutputStream outputStream;

    public ArrayBlockingQueue<byte[]> yuv420Queue = new ArrayBlockingQueue<>(10);

    private String filePath;



    /***
     * 构造函数
     * @param width
     * @param height
     * @param framerate
     */
    public H264Encoder(int width, int height, int framerate) {
        this.width = width;
        this.height = height;
        this.framerate = framerate;

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
            //createfile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void createfile() {
        String path = null;

    
            path=  filePath;

        File file = new File(path);
        if (file.exists()) {
            file.delete();
        }
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void putData(byte[] buffer) {
        if (yuv420Queue.size() >= 10) {
            yuv420Queue.poll();
        }
        yuv420Queue.add(buffer);
    }

    public void startEncoder(String filePath){

        this.filePath=filePath;
        createfile();
        startEncoder();
    }

    /***
     * 开始编码
     */
    public void startEncoder() {
        new Thread(new Runnable() {

            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts = 0;
                long generateIndex = 0;

                while (isRuning) {
                    if (yuv420Queue.size() > 0) {
                        input = yuv420Queue.poll();
                        byte[] yuv420sp = new byte[width * height * 3 / 2];
                        // 必须要转格式,否则录制的内容播放出来为绿屏
                        NV21ToNV12(input, yuv420sp, width, height);
                        input = yuv420sp;
                    }
                    if (input != null) {
                        try {
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, System.currentTimeMillis(), 0);
                                generateIndex += 1;
                            }

                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                if (bufferInfo.flags == MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
                                    configbyte = new byte[bufferInfo.size];
                                    configbyte = outData;
                                } else if (bufferInfo.flags == MediaCodec.BUFFER_FLAG_SYNC_FRAME) {
                                    byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                    outputStream.write(keyframe, 0, keyframe.length);
                                } else {
                                    outputStream.write(outData, 0, outData.length);
                                }

                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }

                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }

                // 停止编解码器并释放资源
                try {
                    mediaCodec.stop();
                    mediaCodec.release();
                } catch (Exception e) {
                    e.printStackTrace();
                }

                // 关闭数据流
                try {
                    outputStream.flush();
                    outputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }

    /**
     * 停止编码数据
     */
    public void stopEncoder() {
        isRuning = false;
    }

    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        int i = 0, j = 0;
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        for (i = 0; i < framesize; i++) {
            nv12[i] = nv21[i];
        }
        for (j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j - 1] = nv21[j + framesize];
        }
        for (j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j] = nv21[j + framesize - 1];
        }
    }

    /**
     * 根据帧数生成时间戳
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / framerate;
    }
}

如何解决这个问题?H264Encoder是我从网上找的,另外从onPreviewFrame方法里采集视频帧合并成视频,还有哪些方法?

加载中
返回顶部
顶部