
Android video processing: a dynamic timestamp watermark

A recent project came with a real headache of a requirement: while recording video on Android, dynamically overlay a timestamp, accurate to the second, like the one on surveillance footage. The catch is that showing the time in the player UI is not enough; it has to be burned into the video itself, so that when the MP4 is played on a computer every frame still shows its time.
The approach I finally settled on was to post-process the video after recording finishes.
I went through a lot of material along the way; the most useful candidates turned out to be ffmpeg and the newer MediaCodec family of APIs. Since ffmpeg is implemented in C and drags in a pile of NDK work that I am not very familiar with, I focused on the MediaCodec APIs.

I mainly followed a blog post whose logic flow diagram makes the process clear at a glance.

The overall logic of encoding and decoding with MediaCodec looks like this (diagram reposted):


The call sequence of the main functions is as follows:
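In code, the sequence those diagrams describe boils down to a feed-and-drain loop. The sketch below is my own minimal illustration, not taken from the project: it uses the synchronous, pre-API-21 buffer-array style that the rest of this article uses, and it assumes decoder is an already configured and started MediaCodec and extractor is a MediaExtractor with its video track already selected.

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.nio.ByteBuffer;

public class DecodeLoopSketch {
    // Minimal sketch of the decode loop: feed samples in, drain frames out.
    static void decodeLoop(MediaCodec decoder, MediaExtractor extractor) {
        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean inputDone = false, outputDone = false;
        while (!outputDone) {
            if (!inputDone) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer inBuf = inputBuffers[inIndex];
                    inBuf.clear();
                    int size = extractor.readSampleData(inBuf, 0);
                    if (size < 0) { // no more samples: signal end of stream
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, size, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outIndex = decoder.dequeueOutputBuffer(info, 10000);
            if (outIndex >= 0) {
                ByteBuffer outBuf = decoder.getOutputBuffers()[outIndex]; // decoded frame (YUV for video)
                // ...hand outBuf off for processing; this article copies it and draws the timestamp...
                decoder.releaseOutputBuffer(outIndex, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                }
            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat(); // carries color format, width, height, ...
            }
        }
    }
}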


Between them, MediaExtractor, MediaCodec and MediaMuxer already cover a lot of multimedia work: MediaExtractor + MediaMuxer alone is enough for audio/video clipping, MediaCodec + MediaMuxer can build a custom recorder, and all three together make effects editing, filters and the like possible. A minimal remux sketch of the first combination is shown below.
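As a taste of that first combination (and not part of this article's watermark code), here is a hedged sketch of copying a video track from one MP4 to another with MediaExtractor + MediaMuxer, without re-encoding. The method name, paths and the 1 MB sample buffer are placeholders of my own.

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;

public class RemuxSketch {
    // Copy the first video track from srcPath to dstPath without re-encoding.
    public static void remux(String srcPath, String dstPath) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(srcPath);
        MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int dstTrack = -1;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
                extractor.selectTrack(i);
                dstTrack = muxer.addTrack(format);
                break;
            }
        }
        muxer.start();
        ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024); // illustrative sample buffer size
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int size;
        while ((size = extractor.readSampleData(buffer, 0)) >= 0) {
            info.offset = 0;
            info.size = size;
            info.presentationTimeUs = extractor.getSampleTime();
            info.flags = extractor.getSampleFlags();
            muxer.writeSampleData(dstTrack, buffer, info);
            extractor.advance();
        }
        muxer.stop();
        muxer.release();
        extractor.release();
    }
}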
Adding the timestamp watermark effect


The tricky part is that the decoded frames come out in a YUV format, and which one depends on the capture settings; in my case it was NV21, i.e. YUV420sp. After getting an NV21 frame I convert it to RGB, draw on it, and then convert it back to NV21 for the encoder. It looks clumsy and is very time-consuming, but I have not found a better way yet.

private Bitmap first;

private void handleFrameData(byte[] data, MediaCodec.BufferInfo info) {
    // Convert YUV420sp to RGB (5-60 ms)
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, srcWidth, srcHeight, null);
    yuvImage.compressToJpeg(new Rect(0, 0, srcWidth, srcHeight), 100, out);
    byte[] imageBytes = out.toByteArray();

    // Rotate the image; this also fixes the video playing rotated 90 degrees on a computer (20-50 ms)
    Bitmap image = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    Bitmap bitmap = rotaingImageView(videoRotation, image);
    image.recycle();

    // Draw the timestamp text (0-1 ms)
    Canvas canvas = new Canvas(bitmap);
    canvas.drawText(videoTimeFormat.format(videoFirstTime + info.presentationTimeUs / 1000), 10, 30, paint);

    // Preview the processed frame (0-5 ms)
    first = bitmap;
    handler.sendEmptyMessage((int) (info.presentationTimeUs / 1000));

    synchronized (MediaCodec.class) { // remember to lock
        timeDataContainer.add(new Frame(info, bitmap));
    }
}

/*
 * Rotate a bitmap
 * @param angle
 * @param bitmap
 * @return Bitmap
 */
public Bitmap rotaingImageView(int angle, Bitmap bitmap) {
    // set up the rotation
    Matrix matrix = new Matrix();
    matrix.postRotate(angle);
    // create the rotated bitmap
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
}
Then converting back to NV21:

/**
 * Get a frame with the timestamp drawn on it.
 *
 * @return
 */
private Frame getFrameData() {
    synchronized (MediaCodec.class) { // remember to lock
        if (timeDataContainer.isEmpty()) {
            return null;
        }
        // take the frame out of the queue; removing it preserves order and frees memory promptly
        Frame frame = timeDataContainer.remove(0);
        // convert back to YUV420sp (120-160 ms)
        frame.data = getNV21(dstWidth, dstHeight, frame.bitmap);
        return frame;
    }
}

public static byte[] getNV21(int width, int height, Bitmap scaled) {
    int[] argb = new int[width * height];
    scaled.getPixels(argb, 0, width, 0, 0, width, height);
    byte[] yuv = new byte[width * height * 3 / 2];
    encodeYUV420SP(yuv, argb, width, height);
    scaled.recycle();
    return yuv;
}

/**
 * Convert the ARGB data from the bitmap into YUV420sp.
 * The YUV420sp data can be fed straight to MediaCodec for encoding.
 *
 * @param yuv420sp output buffer for the YUV420sp data
 * @param argb     input ARGB data
 * @param width    image width
 * @param height   image height
 */
public static void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;
    int yIndex = 0;
    int uvIndex = frameSize;
    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            // a = (argb[index] & 0xff000000) >> 24; // alpha is not used
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff) >> 0;

            // well known RGB to YUV algorithm
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2,
            // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
            // pixel AND every other scanline.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
            }
            index++;
        }
    }
}
Given the timings noted in the code above, processing in real time while recording is out of the question; even in a background service, a 3-second 720*480 clip takes roughly 20 seconds.
There are plenty of other pitfalls around decoding and encoding as well. For example, the encoder on some phones does not support the chosen color format, so to cover more devices the color-format setting will need to change later; one option is to query the encoder's supported color formats first, as sketched after the init code below.
/**
 * Initialize the encoder
 */
private void initMediaEncode(String mime) {
    try {
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, dstWidth, dstHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 512);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 27);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        // format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        mediaEncode = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        mediaEncode.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (mediaEncode == null) {
        JLog.e(tag, "create mediaEncode failed");
        return;
    }
    mediaEncode.start();
}
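Rather than hard-coding a format, one option (my own sketch, not part of the original service) is to ask the device's AVC encoder which YUV420 formats it actually claims to support and pick from that list:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;

public class ColorFormatProbe {
    // Return a YUV420 color format the device's AVC encoder reports as supported,
    // preferring semi-planar; falls back to planar.
    public static int pickColorFormat() throws IOException {
        MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        MediaCodecInfo.CodecCapabilities caps =
                encoder.getCodecInfo().getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_AVC);
        encoder.release();
        int fallback = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
        for (int format : caps.colorFormats) {
            if (format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                return format; // closest to the NV21/NV12 family this article works with
            }
            if (format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
                fallback = format;
            }
        }
        return fallback;
    }
}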
Update: the color format that works on most phones should be MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar = 21. That color format can be read when the decoder reports its first output buffer format, but that format surprisingly carries no bit rate, key-frame interval or FPS, so those still have to be set to whatever suits your case.
As for why I had been using YUV420Flexible: the Android source marks YUV420SemiPlanar as deprecated:
@deprecated Use {@link #COLOR_FormatYUV420Flexible}.
public static final int COLOR_FormatYUV420SemiPlanar        = 21;

Still, the actual format can be read back from the decoder while it works through the first buffers of the source file:
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
    MediaFormat format = mediaDecode.getOutputFormat();
    Log.d(tag, "New format " + format);
    if (format != null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
        videoColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        Log.d(tag, "decode extract get videoColorFormat =" + videoColorFormat); // color format reported by the decoder
    }
    initMediaEncode(videoColorFormat); // initialize the encoder with this color format
    break;
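To make that log easier to read, a small helper of my own (not part of the original service; it assumes android.media.MediaCodecInfo is imported) can map the reported integer to a name; the constant values come from MediaCodecInfo.CodecCapabilities:

// Turn the integer reported by the decoder into a readable name for logging.
private static String colorFormatName(int colorFormat) {
    switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:     // 19 (I420-style)
            return "COLOR_FormatYUV420Planar";
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: // 21 (NV12-style)
            return "COLOR_FormatYUV420SemiPlanar";
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible:   // 0x7F420888
            return "COLOR_FormatYUV420Flexible";
        default:
            return "color format " + colorFormat;
    }
}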
Full source:

import android.annotation.TargetApi;
import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaMuxer;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.os.Message;
import android.support.annotation.Nullable;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by user on 2016/8/13.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class TestCodecService extends Service {

    private MediaExtractor extractor;
    private MediaMuxer muxer;
    private final static String tag = "px";
    private final String TAG = this.getClass().getSimpleName();
    private MediaFormat format;
    private int videoMaxInputSize = 0, videoRotation = 0;
    private long videoDuration;
    private boolean decodeOver = false, encoding = false, mCancel, mDelete;
    // index of the video track in the stream
    private int videoTrackIndex = -1;
    private MediaCodec mediaDecode, mediaEncode;
    private ByteBuffer[] decodeInputBuffers, decodeOutputBuffers;
    private ArrayList<Frame> timeDataContainer; // frame container
    private MediaCodec.BufferInfo decodeBufferInfo;
    private int srcWidth, srcHeight, dstWidth, dstHeight;
    private SimpleDateFormat videoTimeFormat;
    private int mProgress, mMax;
    private VideoCodecDao codecDao;
    // paint used to draw the timestamp
    private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);

    @Override
    public void onCreate() {
        super.onCreate();
        JLog.d(TAG, "onCreate");
        // display format of the video timestamp
        videoTimeFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        timeDataContainer = new ArrayList<>();
        // set up the paint
        paint.setColor(Color.WHITE);
        paint.setTextSize(20);
        codecDao = VideoCodecDao.getInstance(JingRuiApp.getJRApplicationContext());
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        JLog.d(TAG, "onDestroy");
        decodeOver = true;
        encoding = false;
    }

    private void init(String srcPath, String dstpath) {
        MediaMetadataRetriever mmr = new MediaMetadataRetriever();
        mmr.setDataSource(srcPath);
        try {
            srcWidth = Integer.parseInt(mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
            srcHeight = Integer.parseInt(mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
        try {
            extractor = new MediaExtractor();
            extractor.setDataSource(srcPath);
            String mime = null;
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                // get the detailed format/configuration of each track
                MediaFormat format = extractor.getTrackFormat(i);
                mime = format.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith("video/")) {
                    videoTrackIndex = i;
                    this.format = format;
                } else if (mime.startsWith("audio/")) {
                    continue;
                } else {
                    continue;
                }
            }
            extractor.selectTrack(videoTrackIndex); // read from the video track
            // create the muxer
            srcWidth = format.getInteger(MediaFormat.KEY_WIDTH);
            dstHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
            videoMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
            videoDuration =
                    format.getLong(MediaFormat.KEY_DURATION);
            // videoRotation = format.getInteger(MediaFormat.KEY_ROTATION);
            videoRotation = 90; // older API levels cannot read the rotation, so it is hard-coded here
            if (videoRotation == 90) {
                dstWidth = srcHeight;
                dstHeight = srcWidth;
            } else if (videoRotation == 0) {
                dstWidth = srcWidth;
                dstHeight = srcHeight;
            }
            mMax = (int) (videoDuration / 1000);
            // int bit = this.format.getInteger(MediaFormat.KEY_BIT_RATE);
            JLog.d(tag, "videoWidth=" + srcWidth + ",videoHeight=" + srcHeight
                    + ",videoMaxInputSize=" + videoMaxInputSize + ",videoDuration=" + videoDuration
                    + ",videoRotation=" + videoRotation);
            // muxer that writes the output file
            muxer = new MediaMuxer(dstpath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            // add the video track to the muxer
            // videoTrackIndex = muxer.addTrack(format);
            MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
            videoInfo.presentationTimeUs = 0;
            initMediaDecode(mime);
            initMediaEncode(mime);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // pull out every frame
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void extract() {
        int inputIndex = mediaDecode.dequeueInputBuffer(-1); // get a free input buffer; -1 waits forever, 0 returns immediately. -1 is recommended to avoid dropping frames
        if (inputIndex < 0) {
            JLog.d("px", "=========== code over =======");
            return;
        }
        ByteBuffer inputBuffer = decodeInputBuffers[inputIndex]; // the input buffer
        inputBuffer.clear();
        int length = extractor.readSampleData(inputBuffer, 0); // read one sample into the decode queue
        if (length < 0) {
            JLog.d("px", "extract Over");
            decodeOver = true;
            return;
        } else {
            // get the timestamp
            long presentationTimeUs = extractor.getSampleTime();
            MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
            videoInfo.offset = 0;
            videoInfo.size = length;
            // frame type; only tells whether this is an I frame
            videoInfo.flags = extractor.getSampleFlags();
            videoInfo.presentationTimeUs = extractor.getSampleTime();
            // decode the video
            decode(videoInfo, inputIndex);
            extractor.advance(); // move to the next sample
        }
    }

    private void handleFrameData(byte[] data, MediaCodec.BufferInfo info) {
        // convert YUV420sp to RGB (5-60 ms)
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, srcWidth, srcHeight, null);
        yuvImage.compressToJpeg(new Rect(0, 0, srcWidth, srcHeight), 100, out);
        byte[] imageBytes = out.toByteArray();
        // rotate the image (20-50 ms)
        Bitmap image = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
        Bitmap bitmap = rotaingImageView(videoRotation, image);
        image.recycle();
        // draw the timestamp text (0-1 ms)
        Canvas canvas = new Canvas(bitmap);
        canvas.drawText(videoTimeFormat.format(mVideo.videoCreateTime + info.presentationTimeUs / 1000), 10, 30, paint);
        // report progress (0-5 ms)
        mProgress = (int) (info.presentationTimeUs / 1000);
        if (mListener != null) {
            mListener.onProgress(mProgress, mMax);
        }
        synchronized (MediaCodec.class) { // remember to lock
            timeDataContainer.add(new Frame(info, bitmap));
        }
    }

    public static byte[] getNV21(int width, int height, Bitmap scaled) {
        int[] argb = new int[width * height];
        scaled.getPixels(argb, 0, width, 0, 0, width, height);
        byte[] yuv = new byte[width * height * 3 / 2];
        encodeYUV420SP(yuv, argb, width, height);
        scaled.recycle();
        return yuv;
    }

    /**
     * Convert the ARGB data from the bitmap into YUV420sp.
     * The YUV420sp data can be fed straight to MediaCodec for encoding.
     *
     * @param yuv420sp output buffer for the YUV420sp data
     * @param argb     input ARGB data
     * @param width    image width
     * @param height   image height
     */
    public static void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;
        int yIndex = 0;
        int uvIndex = frameSize;
        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                // a = (argb[index] & 0xff000000) >> 24; // alpha is not used
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff)
                        >> 0;
                // well known RGB to YUV algorithm
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
                // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2,
                // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
                // pixel AND every other scanline.
                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                }
                index++;
            }
        }
    }

    /**
     * Get a frame with the timestamp drawn on it.
     *
     * @return
     */
    private Frame getFrameData() {
        synchronized (MediaCodec.class) { // remember to lock
            if (timeDataContainer.isEmpty()) {
                return null;
            }
            // take the frame out of the queue; removing it preserves order and frees memory promptly
            Frame frame = timeDataContainer.remove(0);
            // convert back to YUV420sp (120-160 ms)
            frame.data = getNV21(dstWidth, dstHeight, frame.bitmap);
            return frame;
        }
    }

    /*
     * Rotate a bitmap
     * @param angle
     * @param bitmap
     * @return Bitmap
     */
    public Bitmap rotaingImageView(int angle, Bitmap bitmap) {
        // set up the rotation
        Matrix matrix = new Matrix();
        matrix.postRotate(angle);
        // create the rotated bitmap
        return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    }

    /**
     * Initialize the decoder
     */
    private void initMediaDecode(String mime) {
        try {
            // create the decoder
            mediaDecode = MediaCodec.createDecoderByType(mime);
            mediaDecode.configure(format, null, null, 0);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (mediaDecode == null) {
            JLog.e(tag, "create mediaDecode failed");
            return;
        }
        mediaDecode.start();
        decodeInputBuffers = mediaDecode.getInputBuffers();
        decodeOutputBuffers = mediaDecode.getOutputBuffers();
        decodeBufferInfo = new MediaCodec.BufferInfo(); // describes the decoded byte[] data
    }

    /**
     * Initialize the encoder
     */
    private void initMediaEncode(String mime) {
        try {
            MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, dstWidth, dstHeight);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 512);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 27);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            // format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            mediaEncode = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            mediaEncode.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (mediaEncode == null) {
            JLog.e(tag, "create mediaEncode failed");
            return;
        }
        mediaEncode.start();
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void decode(MediaCodec.BufferInfo videoInfo, int inputIndex) {
        mediaDecode.queueInputBuffer(inputIndex, 0, videoInfo.size, videoInfo.presentationTimeUs, videoInfo.flags); // tell the decoder to decode the data just queued
        // dequeue the decoded byte[] data; the timeout works as above: -1 waits forever, 0 returns immediately (microseconds).
        // Do not pass -1 here: sometimes there is no output and it would block forever.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputIndex = mediaDecode.dequeueOutputBuffer(bufferInfo, 50000);
        switch (outputIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                JLog.d(tag, "INFO_OUTPUT_BUFFERS_CHANGED");
                decodeOutputBuffers = mediaDecode.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                JLog.d(tag, "New format " + mediaDecode.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                JLog.d(tag, "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer outputBuffer;
                byte[] frame;
                while (outputIndex >= 0)
                { // the decoder may not emit everything at once, so loop until all pending output has been drained
                    outputBuffer = decodeOutputBuffers[outputIndex]; // buffer holding the decoded data
                    frame = new byte[bufferInfo.size]; // BufferInfo gives the size of this chunk
                    outputBuffer.get(frame); // copy the buffer contents into the byte array
                    outputBuffer.clear(); // always clear the buffer; MediaCodec reuses these buffers, otherwise stale data comes back next time
                    handleFrameData(frame, videoInfo); // custom method that hands the data to the encoder thread
                    mediaDecode.releaseOutputBuffer(outputIndex, false); // must release, or MediaCodec runs out of buffers and stops producing output
                    outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, 50000); // fetch again; outputIndex = -1 ends the loop when there is no more output
                }
                break;
        }
    }

    /**
     * Encode
     */
    private void encode() {
        // get the data produced by the decoder thread
        byte[] chunkTime;
        Frame frame = getFrameData();
        if (frame == null) {
            return;
        }
        chunkTime = frame.data;
        int inputIndex = mediaEncode.dequeueInputBuffer(-1); // same as the decoder
        if (inputIndex < 0) {
            JLog.d("px", "dequeueInputBuffer return inputIndex " + inputIndex + ",then break");
            mediaEncode.signalEndOfInputStream();
        }
        ByteBuffer inputBuffer = mediaEncode.getInputBuffers()[inputIndex]; // same as the decoder
        inputBuffer.clear(); // same as the decoder
        inputBuffer.put(chunkTime); // fill the input buffer with the frame data
        inputBuffer.limit(frame.videoInfo.size);
        mediaEncode.queueInputBuffer(inputIndex, 0, chunkTime.length, frame.videoInfo.presentationTimeUs, frame.videoInfo.flags); // tell the encoder to encode
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputIndex = mediaEncode.dequeueOutputBuffer(bufferInfo, 50000); // same as the decoder
        switch (outputIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                JLog.d(tag, "INFO_OUTPUT_BUFFERS_CHANGED");
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat outputFormat = mediaEncode.getOutputFormat();
                outputFormat.setInteger(MediaFormat.KEY_ROTATION, videoRotation);
                JLog.d(tag, "mediaEncode find New format " + outputFormat);
                // add the video track to the muxer
                videoTrackIndex = muxer.addTrack(outputFormat);
                muxer.start();
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                JLog.d(tag, "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer outputBuffer;
                while (outputIndex >= 0) { // same as the decoder
                    outputBuffer = mediaEncode.getOutputBuffers()[outputIndex]; // the output buffer
                    muxer.writeSampleData(videoTrackIndex, outputBuffer, bufferInfo);
                    // JLog.d("px", "writeSampleData:" + bufferInfo.size);
                    mediaEncode.releaseOutputBuffer(outputIndex, false);
                    outputIndex = mediaEncode.dequeueOutputBuffer(bufferInfo, 50000);
                }
                break;
        }
    }

    private void release() {
        // release MediaMuxer and MediaExtractor once everything has been written
        extractor.release();
        mediaDecode.release();
        mediaEncode.release();
        muxer.stop();
        muxer.release();
    }

    private DecodeRunnable decodeRunnable;
    private EncodeRunnable encodeRunnable;

    /**
     * Decode thread
     */
    private class DecodeRunnable extends Thread {
        @Override
        public void run() {
            decodeOver = false;
            while (!decodeOver) {
                try {
                    extract();
                } catch (Exception e) {
                    // catch the exception caused by the file being deleted
                    JLog.e("px", e.toString());
                }
                synchronized (encodeRunnable) {
                    encodeRunnable.notify();
                }
            }
        }
    }

    /**
     * Encode thread
     */
    private class EncodeRunnable extends Thread {
        @Override
        public void run() {
            encoding = true;
            while (encoding) {
                if (timeDataContainer.isEmpty()) {
                    if (decodeOver) { // decoding finished and the cache is empty
                        break;
                    }
                    try {
                        synchronized (encodeRunnable) {
                            wait();
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                } else {
                    encode();
                }
            }
            release();
            encoding = false;
            handler.sendEmptyMessage(-2); // send a message that the task is complete
        }
    }

    android.os.Handler handler = new android.os.Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case -2:
                    onComplete();
                    break;
                default:
                    break;
            }
        }
    };

    public void onComplete() {
        if (mDelete) { // delete was requested, which implies a cancel
            mDelete = false;
            new File(mVideo.srcPath).delete(); // explicit delete request: remove the source file and the DB record
            codecDao.deleteItem(mVideo);
            JLog.d("px",
"delete file " + mVideo.srcPath); } else {mVideo.finish = mCancel ? 0 : 100;codecDao.createOrUpdate(mVideo);//更新数据库状态为已完成,或闲置中 } if (mCancel) {//中途取消mCancel = false;new File(mVideo.dstPath).delete();//取消,删除目标文件JLog.d("px", "delete file " + mVideo.dstPath); } else {//顺利完成new File(mVideo.srcPath).delete();//成功,删除源文件JLog.d("px", "delete file " + mVideo.srcPath); } if (mListener != null) {mListener.onCodecFinish(mVideo); } if (!videos.isEmpty()) {VideoCodecModel video = videos.remove(0);start(video); } } class Frame { MediaCodec.BufferInfo videoInfo; byte[] data; Bitmap bitmap; public Frame(MediaCodec.BufferInfo videoInfo, Bitmap bitmap) {this.videoInfo = videoInfo;this.bitmap = bitmap; } } private long getInterval() { //用第一二帧获取帧间隔 long videoSampleTime; ByteBuffer buffer = ByteBuffer.allocate(1024 * 512); //获取源视频相邻帧之间的时间间隔。(1) extractor.readSampleData(buffer, 0); //skip first I frame if (extractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC)extractor.advance(); extractor.readSampleData(buffer, 0); long firstVideoPTS = extractor.getSampleTime(); extractor.advance(); extractor.readSampleData(buffer, 0); long SecondVideoPTS = extractor.getSampleTime(); videoSampleTime = Math.abs(SecondVideoPTS - firstVideoPTS); JLog.d(tag, "videoSampleTime is " + videoSampleTime); return videoSampleTime; } @Override public int onStartCommand(Intent intent, int flags, int startId) { JLog.d(TAG, "onStartCommand"); super.onStartCommand(intent, flags, startId); if (intent == null) {return START_NOT_STICKY; } int action = intent.getIntExtra("action", 0); if (action == REQUEST_CODEC) {VideoCodecModel video = (VideoCodecModel) intent.getSerializableExtra("video");video = codecDao.addItem(video);if (!encoding) {start(video);} else {videos.add(video);} } else if (action == REQUEST_CODEC_CANCEL) {VideoCodecModel video = (VideoCodecModel) intent.getSerializableExtra("video");mDelete = intent.getBooleanExtra("delete", false);//是否删除旧文件JLog.d("px", "----- onStartCommand action " + action + " is delete?" 
                    + mDelete);
            mBinder.cancel(video);
        }
        return START_NOT_STICKY;
    }

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        JLog.d(TAG, "onBind");
        return mBinder;
    }

    private CodecBinder mBinder = new CodecBinder();
    private VideoCodecModel mVideo;
    // queue of pending video tasks
    private List<VideoCodecModel> videos = new ArrayList<>();
    public static final int REQUEST_CODEC = 0x183;
    public static final int REQUEST_CODEC_CANCEL = 0x184;

    public class CodecBinder extends Binder {
        /**
         * @param video
         * @return whether it can run immediately, or has to wait in the queue
         */
        public boolean start(VideoCodecModel video) {
            video = codecDao.addItem(video);
            if (!encoding) {
                TestCodecService.this.start(video);
            } else {
                videos.add(video);
            }
            return !encoding;
        }

        public void setOnProgressChangeListener(OnProgressChangeListener l) {
            mListener = l;
        }

        public VideoCodecModel getCurrentVideo() {
            return mVideo;
        }

        public void cancel(VideoCodecModel video) {
            if (mVideo.equals(video)) { // currently being processed
                decodeOver = true; // stop the decode thread
                encoding = false;  // stop the encode thread
                mCancel = true;    // clean up files etc. once the threads stop
            } else { // the video is not being processed
                boolean flag = videos.remove(video);
                if (flag) {
                    JLog.d("px", "cancel render task sucess");
                } else {
                    // the task was not in the queue at all
                    JLog.d("px", "cancel render task fail,seems this video not in renderring queen");
                }
                // delete the source file
                if (mDelete) {
                    mDelete = false;
                    new File(video.srcPath).delete();
                    codecDao.deleteItem(video);
                }
            }
        }

        public List<VideoCodecModel> getVideoList() {
            return videos;
        }

        public void removeListener() {
            mListener = null;
        }
    }

    private void start(VideoCodecModel video) {
        if (video == null) {
            return;
        }
        if (!new File(video.srcPath).exists()) {
            Toast.makeText(this, "该视频缓存文件可能已经被删除", Toast.LENGTH_SHORT).show();
            video.finish = -100;
            codecDao.createOrUpdate(video);
            return;
        }
        mVideo = video;
        if (mListener != null) {
            mListener.onCodecStart(mVideo);
        }
        mVideo.finish = 50; // mark as processing
        codecDao.createOrUpdate(mVideo);
        Runnable runnable = new Runnable() {
            @Override
            public void run() {
                init(mVideo.srcPath, mVideo.dstPath);
                decodeRunnable = new DecodeRunnable();
                decodeRunnable.start();
                encodeRunnable = new EncodeRunnable();
                encodeRunnable.start();
            }
        };
        AsyncTaskExecutor.getExecutor().execute(runnable);
    }

    private OnProgressChangeListener mListener;

    public interface OnProgressChangeListener {
        void onProgress(int progress, int max);

        void onCodecStart(VideoCodecModel video);

        void onCodecFinish(VideoCodecModel video);
    }
}

// The model class
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
import java.io.Serializable;

/**
 * Created by user on 2016/8/29.
 */
@DatabaseTable(tableName = "video_codec_task")
public class VideoCodecModel implements Serializable {
    private static final long serialVersionUID = -1307249622002520298L;
    @DatabaseField
    public String srcPath;
    @DatabaseField
    public String dstPath;
    @DatabaseField
    public long videoCreateTime;
    @DatabaseField(generatedId = true)
    public int id;
    // 0 = blocked, 50 = rendering or queued, 100 = finished, -100 = deleted
    @DatabaseField
    public int finish = 0;
    @DatabaseField
    public String serno;
    // only used while operating on the list; not persisted
    public boolean select;

    public VideoCodecModel(String srcPath, String dstPath, long videoCreateTime) {
        this.srcPath = srcPath;
        this.videoCreateTime = videoCreateTime;
        this.dstPath = dstPath;
    }

    public VideoCodecModel() {
    }

    public String getSrcPath() {
        return srcPath;
    }

    public void setSrcPath(String srcPath) {
        this.srcPath = srcPath;
    }

    public String getDstPath() {
        return dstPath;
    }

    public void setDstPath(String dstPath) {
        this.dstPath = dstPath;
    }

    public long getVideoCreateTime() {
        return videoCreateTime;
    }

    public void setVideoCreateTime(long videoCreateTime) {
        this.videoCreateTime = videoCreateTime;
    }

    public boolean isSelect() {
        return select;
    }

    public void setSelect(boolean select) {
        this.select = select;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof VideoCodecModel)) return false;
        VideoCodecModel that = (VideoCodecModel) o;
        if (videoCreateTime != that.videoCreateTime) return false;
        if (!srcPath.equals(that.srcPath)) return false;
        return dstPath.equals(that.dstPath);
    }
}

// The Activity used to check the state of the watermark tasks and monitor the Service.
// Whether this Activity is open or not does not affect the Service.
import android.annotation.TargetApi;
import android.app.ProgressDialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Message;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.Gravity;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.PopupMenu;
import android.widget.ProgressBar;
import android.widget.TextView;
import ...
import java.io.File;
import java.lang.ref.WeakReference;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Created by user on 2016/8/29.
 */
public class ShowCodecActivity extends BaseActivity implements TestCodecService.OnProgressChangeListener, View.OnClickListener {
    private TextView noneTipsView;
    private List<VideoCodecModel> videos = new ArrayList<>(), cordingVideos;
    private ListView listView;
    private BaseAdapter adapter;
    private View firstTips;
    @Nullable
    VideoCodecModel curShowVideo, curRenderVideo;
    TestCodecService.CodecBinder binder;
    private ProgressBar progressBar;
    ServiceConnection connection;
    VideoCodecDao codecDao;
    private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    private boolean mEditMode = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_show_codec);
        setTitle("签约视频列表");
        initView();
        if (getIntent() != null) {
            curShowVideo = (VideoCodecModel) getIntent().getSerializableExtra("video");
        }
        codecDao = VideoCodecDao.getInstance(this);
        final Intent intent = new Intent(this, TestCodecService.class);
        connection = new ServiceConnection() {
            @Override
            public void onServiceConnected(ComponentName name, IBinder service) {
                Log.d("px", "onServiceConnected");
                binder = (TestCodecService.CodecBinder) service;
                binder.setOnProgressChangeListener(ShowCodecActivity.this);
                videos.clear();
                curRenderVideo = binder.getCurrentVideo();
                cordingVideos = binder.getVideoList();
                videos.addAll(codecDao.queryAll());
                notifyChange();
            }

            @Override
            public void onServiceDisconnected(ComponentName name) {
            }
        };
        bindService(intent, connection, Context.BIND_AUTO_CREATE);
    }

    private void notifyChange() {
        if (adapter == null) {
            adapter = new BaseAdapter() {
                @Override
                public int getCount() {
                    return videos.size();
                }

                @Override
                public VideoCodecModel getItem(int position) {
                    return videos.get(position);
                }

                @Override
                public long getItemId(int position) {
                    return 0;
                }

                @Override
                public View getView(int position, View convertView, ViewGroup parent) {
                    final Holder holder;
                    if (convertView == null) {
                        convertView = View.inflate(ShowCodecActivity.this, R.layout.item_show_codec, null);
                        holder = new Holder();
                        holder.bar = (ProgressBar) convertView.findViewById(R.id.pb_codec);
                        holder.status = (TextView) convertView.findViewById(R.id.status);
                        holder.serno = (TextView) convertView.findViewById(R.id.serno);
                        holder.select = convertView.findViewById(R.id.select);
                        holder.time = (TextView) convertView.findViewById(R.id.time);
                        holder.operate = (TextView) convertView.findViewById(R.id.operate);
                        holder.checkBox = (CheckBox) convertView.findViewById(R.id.cb_select);
                        convertView.setTag(holder);
                    } else {
                        holder = (Holder) convertView.getTag();
                    }
                    final VideoCodecModel video = getItem(position);
                    if (video.finish == 100) {
                        holder.status.setText("已完成");
                        holder.operate.setVisibility(View.VISIBLE);
                        holder.operate.setText("操作");
                    } else if (video.finish == -100) {
                        holder.status.setText("已删除");
                        holder.operate.setVisibility(View.INVISIBLE);
                    } else if (video.equals(curRenderVideo)) {
                        progressBar = holder.bar;
                        holder.status.setText("处理中");
                        holder.operate.setVisibility(View.INVISIBLE);
                    } else if (cordingVideos.contains(video)) {
                        holder.status.setText("等待中");
                        holder.operate.setVisibility(View.VISIBLE);
                        holder.operate.setText("取消");
                    } else {
                        holder.status.setText("未处理");
                        holder.operate.setVisibility(View.VISIBLE);
                        holder.operate.setText("开始");
                    }
                    holder.operate.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            if (video.finish == 100) {
                                operate(holder.status, video);
                            } else if (video.finish == -100) {
                                return;
                            } else if (video.equals(curRenderVideo)) {
                                // already encoding; nothing to do
                                return;
                            } else if
                            (cordingVideos.contains(video)) {
                                // already in the encode queue; can be cancelled
                                binder.cancel(video);
                                holder.status.setText("未处理");
                                holder.operate.setVisibility(View.VISIBLE);
                                holder.operate.setText("开始");
                            } else {
                                boolean immedia = binder.start(video);
                                if (immedia) {
                                    holder.status.setText("处理中");
                                    holder.operate.setVisibility(View.INVISIBLE);
                                } else {
                                    holder.status.setText("等待中");
                                    holder.operate.setVisibility(View.VISIBLE);
                                    holder.operate.setText("取消");
                                }
                            }
                        }
                    });
                    holder.select.setVisibility(video.equals(curShowVideo) ? View.VISIBLE : View.GONE);
                    holder.serno.setText(video.serno);
                    holder.time.setText(dateFormat.format(new Date(video.videoCreateTime)));
                    holder.checkBox.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
                    holder.checkBox.setChecked(video.isSelect());
                    holder.checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
                        @Override
                        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                            video.setSelect(isChecked);
                        }
                    });
                    return convertView;
                }
            };
            listView.setAdapter(adapter);
        } else {
            adapter.notifyDataSetChanged();
        }
        noneTipsView.setVisibility(videos.isEmpty() ? View.VISIBLE : View.GONE);
        more.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
        back.setVisibility(mEditMode ? View.INVISIBLE : View.VISIBLE);
        checkBox.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
    }

    class Holder {
        ProgressBar bar;
        TextView status, serno, time, operate;
        View select;
        CheckBox checkBox;
    }

    private void initView() {
        listView = (ListView) findViewById(R.id.lv_codec);
        noneTipsView = (TextView) findViewById(R.id.tv_none);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                VideoCodecModel video = videos.get(position);
                operate(view, video);
            }
        });
        listView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
            @Override
            public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
                if (mEditMode) {
                    return false;
                }
                mEditMode = true;
                // entering edit mode forgets any previous selection
                for (VideoCodecModel video : videos) {
                    if (video.select) video.select = false;
                }
                checkBox.setChecked(false);
                notifyChange();
                return true;
            }
        });
        firstTips = findViewById(R.id.ll_tips);
        boolean visable = Preferences.getBoolean("firstShowCodec", true);
        firstTips.setVisibility(visable ?
                View.VISIBLE : View.GONE);
        if (visable)
            findViewById(R.id.btn_noshow).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Preferences.put("firstShowCodec", false);
                    firstTips.setVisibility(View.GONE);
                }
            });
        checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                for (VideoCodecModel model : videos) {
                    model.setSelect(isChecked);
                }
                notifyChange();
            }
        });
        more.setText("操作");
        more.setOnClickListener(this);
    }

    private void operate(View view, final VideoCodecModel video) {
        if (video.finish != 100) {
            return;
        }
        PopupMenu popupMenu = new PopupMenu(ShowCodecActivity.this, view);
        popupMenu.getMenu().add(1, 0, 0, "预览或发送");
        popupMenu.getMenu().add(1, 1, 1, "删除");
        popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                switch (item.getItemId()) {
                    case 0:
                        previewVideo(video.dstPath);
                        break;
                    case 1:
                        File file = new File(video.dstPath);
                        if (file.exists()) {
                            file.delete();
                        }
                        codecDao.deleteItem(video);
                        videos.remove(video);
                        if (cordingVideos.contains(video)) {
                            binder.cancel(video);
                        }
                        notifyChange();
                        break;
                }
                return true;
            }
        });
        popupMenu.show();
    }

    @Override
    public void onProgress(int progress, int max) {
        if (progressBar != null) {
            progressBar.setMax(max);
            progressBar.setProgress(progress);
        }
    }

    @Override
    public void onCodecStart(VideoCodecModel video) {
        JLog.d("px", "onCodecStart");
        curRenderVideo = video;
        int index = videos.indexOf(video);
        if (index >= 0) {
            View child = listView.getChildAt(index);
            Holder holder = (Holder) child.getTag();
            holder.status.setText("处理中");
            holder.operate.setVisibility(View.INVISIBLE);
            progressBar = holder.bar;
        }
    }

    @Override
    public void onCodecFinish(VideoCodecModel video) {
        JLog.d("px", "onCodecFinish");
        if (progressBar != null) {
            progressBar.setProgress(0);
        }
        int index = videos.indexOf(video);
        videos.get(index).finish = 100;
        if (index >= 0) {
            View child = listView.getChildAt(index);
            Holder holder = (Holder) child.getTag();
            holder.status.setText("已完成");
            holder.operate.setVisibility(View.VISIBLE);
            holder.operate.setText("操作");
            progressBar = null;
        }
    }

    @Override
    protected void onDestroy() {
        if (binder != null)
            binder.removeListener();
        unbindService(connection);
        super.onDestroy();
    }

    private void previewVideo(String filePath) {
        // preview the recording
        Intent intent = new Intent(Intent.ACTION_VIEW);
        String type = "video/mp4";
        Uri uri = Uri.parse("file://" + filePath);
        intent.setDataAndType(uri, type);
        startActivity(intent);
    }

    @Override
    public void onBackPressed() {
        if (mEditMode) {
            mEditMode = false;
            notifyChange();
            return;
        }
        super.onBackPressed();
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.more:
                PopupMenu menu = new PopupMenu(this, v);
                // menu.getMenu().add(1, 0, 0, "发送");
                menu.getMenu().add(1, 1, 1, "删除");
                menu.getMenu().add(1, 2, 2, "取消");
                menu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
                    @Override
                    public boolean onMenuItemClick(MenuItem item) {
                        switch (item.getItemId()) {
                            case 0:
                                break;
                            case 1:
                                deleteSelect();
                                break;
                            case 2:
                                mEditMode = false;
                                notifyChange();
                                break;
                        }
                        return true;
                    }
                });
                menu.show();
                break;
        }
    }

    // delete the selected items
    private void deleteSelect() {
        final ProgressDialog dialog = ProgressDialog.show(this, null, null);
        AsyncTask<String, String, Boolean> task = new AsyncTask<String, String, Boolean>() {
            @Override
            protected Boolean doInBackground(String...
                    params) {
                boolean has = false; // whether anything deletable was actually selected; the user may have selected nothing
                for (VideoCodecModel video : videos) {
                    if (video.select) {
                        File file;
                        if (video.finish == 100) {
                            file = new File(video.dstPath);
                        } else {
                            file = new File(video.srcPath);
                        }
                        if (file.exists()) {
                            file.delete();
                        }
                        codecDao.deleteItem(video);
                        if (!has) {
                            has = true;
                        }
                    }
                }
                if (has) {
                    videos.clear();
                    videos.addAll(codecDao.queryAll());
                }
                return has;
            }

            @Override
            protected void onPostExecute(Boolean s) {
                mEditMode = false;
                notifyChange();
                dialog.dismiss();
            }
        };
        task.executeOnExecutor(AsyncTaskExecutor.getExecutor());
    }
}