Android video processing: a dynamic timestamp watermark effect
The project I've been working on recently threw up a really painful requirement: while recording video on Android, dynamically stamp the frames with the time, accurate to the second, just like a surveillance feed. The catch is that it isn't enough to simply show the time on the player UI; the time has to be recorded into the video itself, so that when the MP4 is played on a PC the time is visible on every frame.
The approach I eventually settled on was to process the video after recording finishes.
I went through a lot of references along the way; the genuinely useful ones boiled down to ffmpeg and the newer MediaCodec family of APIs. Since ffmpeg is all C plus a pile of NDK work that I don't know well, I concentrated on the MediaCodec APIs.
I followed a blog post whose flow diagram makes the logic clear at a glance.
The overall logic of decoding and encoding with MediaCodec looks like this (flow diagram reproduced from that post):

The call sequence of the main functions is as follows:

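In code, that call order boils down to roughly the following. This is only a minimal sketch (the class and method names are mine, not part of the original project) of the synchronous, buffer-array style of driving a MediaCodec that the rest of this article uses: dequeue an empty input buffer, fill and queue it, then drain output buffers until the codec has nothing more to give.
import java.nio.ByteBuffer;
import android.media.MediaCodec;
class CodecLoopSketch {
    //Feed one chunk of raw input and drain whatever output is ready (pre-API-21 buffer-array style).
    static void step(MediaCodec codec, byte[] input, long ptsUs, MediaCodec.BufferInfo info) {
        int inIndex = codec.dequeueInputBuffer(10000);//wait up to 10ms for a free input buffer
        if (inIndex >= 0) {
            ByteBuffer inBuf = codec.getInputBuffers()[inIndex];
            inBuf.clear();
            inBuf.put(input);
            codec.queueInputBuffer(inIndex, 0, input.length, ptsUs, 0);//hand the data to the codec
        }
        int outIndex = codec.dequeueOutputBuffer(info, 10000);//may also return an INFO_* status code
        while (outIndex >= 0) {
            ByteBuffer outBuf = codec.getOutputBuffers()[outIndex];
            //...consume outBuf between info.offset and info.offset + info.size...
            codec.releaseOutputBuffer(outIndex, false);//give the buffer back to the codec
            outIndex = codec.dequeueOutputBuffer(info, 0);
        }
    }
}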
MediaExtractor, MediaCodec and MediaMuxer between them already cover a great deal of media processing: MediaExtractor + MediaMuxer alone is enough for audio/video clipping, MediaCodec + MediaMuxer gives you a custom recorder, and combining all of them you can build effect editing, filters and the like.
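As an example of the first combination, a plain remux with MediaExtractor + MediaMuxer copies the encoded samples straight across without ever touching MediaCodec. The sketch below is only an illustration (the class/method name, buffer size and video-only handling are my assumptions, not from the original project):
import java.io.IOException;
import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
class RemuxSketch {
    //Copy the first video track of srcPath into dstPath without re-encoding.
    static void remuxVideo(String srcPath, String dstPath) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(srcPath);
        MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int dstTrack = -1;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
                extractor.selectTrack(i);
                dstTrack = muxer.addTrack(format);//the track format is passed through unchanged
                break;
            }
        }
        muxer.start();
        ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024);
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int size;
        while ((size = extractor.readSampleData(buffer, 0)) >= 0) {
            info.offset = 0;
            info.size = size;
            info.presentationTimeUs = extractor.getSampleTime();
            info.flags = extractor.getSampleFlags();
            muxer.writeSampleData(dstTrack, buffer, info);//write the encoded sample as-is
            extractor.advance();
        }
        muxer.stop();
        muxer.release();
        extractor.release();
    }
}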
Adding the timestamp watermark

The crux is that the frames you get out of the decoder are YUV, and the exact layout depends on how the video was captured. In my case it is NV21, i.e. YUV420sp. After getting an NV21 frame I convert it to RGB to render on, then convert it back to NV21 and hand it to the encoder. It looks clumsy and is very slow, but I haven't found a better way yet.
private Bitmap first;
private void handleFrameData(byte[] data, MediaCodec.BufferInfo info) {
//YUV420sp -> RGB data, 5-60ms
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, srcWidth, srcHeight, null);
yuvImage.compressToJpeg(new Rect(0, 0, srcWidth, srcHeight), 100, out);
byte[] imageBytes = out.toByteArray();
//rotate the image; this also fixes the video playing back rotated 90° on a PC, 20-50ms
Bitmap image = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
Bitmap bitmap = rotaingImageView(videoRotation, image);
image.recycle();
//draw the text, 0-1ms
Canvas canvas = new Canvas(bitmap);
canvas.drawText(videoTimeFormat.format(videoFirstTime + info.presentationTimeUs / 1000), 10, 30, paint);
//preview the processed frame, 0-5ms
first = bitmap;
handler.sendEmptyMessage((int) (info.presentationTimeUs / 1000));
synchronized (MediaCodec.class) {//remember to lock
timeDataContainer.add(new Frame(info, bitmap));
}
}
/*
* Rotate a bitmap
* @param angle
* @param bitmap
* @return Bitmap
*/
public Bitmap rotaingImageView(int angle, Bitmap bitmap) {
//apply the rotation
Matrix matrix = new Matrix();
matrix.postRotate(angle);
// create the new bitmap
return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
}
Then the conversion back to NV21:
/**
* Get a frame that already has the timestamp drawn on it
*
* @return
*/
private Frame getFrameData() {
synchronized (MediaCodec.class) {//remember to lock
if (timeDataContainer.isEmpty()) {
return null;
}
//take the next frame from the queue
Frame frame = timeDataContainer.remove(0);//removing it as we take it keeps the frames in order and releases memory promptly
//convert back to YUV420sp, 120-160ms
frame.data = getNV21(dstWidth, dstHeight, frame.bitmap);
return frame;
}
}
public static byte[] getNV21(int width, int height, Bitmap scaled) {
int[] argb = new int[width * height];
scaled.getPixels(argb, 0, width, 0, 0, width, height);
byte[] yuv = new byte[width * height * 3 / 2];
encodeYUV420SP(yuv, argb, width, height);
scaled.recycle();
return yuv;
}
/**
* Convert the ARGB data obtained from a bitmap into YUV420sp.
* The resulting yuv420sp data can be handed straight to MediaCodec (i.e. to the AVC encoder) for encoding.
*
* @param yuv420sp buffer that receives the yuv420sp data
* @param argb     input argb data
* @param width    image width
* @param height   image height
*/
public static void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
// a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
}
index++;
}
}
}
Given the per-stage timings in the code above, processing frames in real time during recording is simply not possible; even in a background service, 3 seconds of 720x480 video takes around 20 seconds to process.
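For what it's worth, per-stage timings like the ones quoted in the comments can be collected with a small helper along these lines (the class name and usage are mine, not from the original project); SystemClock.elapsedRealtime() is monotonic, so it is safer than System.currentTimeMillis() for interval measurement.
import android.os.SystemClock;
import android.util.Log;
class StageTimer {
    private long last = SystemClock.elapsedRealtime();
    //Call after each stage; logs and returns the milliseconds spent since the previous mark.
    long mark(String stage) {
        long now = SystemClock.elapsedRealtime();
        long cost = now - last;
        last = now;
        Log.d("px", stage + " took " + cost + "ms");
        return cost;
    }
}
Calling something like timer.mark("yuv->bitmap") after the compressToJpeg/decodeByteArray step and timer.mark("bitmap->yuv") after getNV21 is how numbers of this kind are typically gathered.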
There are also plenty of pitfalls around the decode and encode setup. For instance, some phones' encoders don't support the color format chosen here, so to cover more devices the color format setting will have to be made adaptive later (a sketch of probing the supported formats follows further below).
/**
* Initialize the encoder
*/
private void initMediaEncode(String mime) {
try {
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, dstWidth, dstHeight);
format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 512);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 27);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
// format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mediaEncode = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
mediaEncode.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IOException e) {
e.printStackTrace();
}
if (mediaEncode == null) {
JLog.e(tag, "create mediaEncode failed");
return;
}
mediaEncode.start();
}
Addendum: the color format that matches most phones should be MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar = 21. It is the format the decoder reports when its first output buffer arrives; oddly, though, that output format carries no bit rate, key-frame interval or frame rate, so those still have to be set to suit your own case.
The reason I used YUV420Flexible earlier is that the Android source marks YUV420SemiPlanar as deprecated:
@deprecated Use {@link #COLOR_FormatYUV420Flexible}.
public static final int COLOR_FormatYUV420SemiPlanar = 21;
Still, the actual color format can now be read back from the decoder when its first output buffer arrives:
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaDecode.getOutputFormat();
Log.d(tag, "New format " + format);
if (format != null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
videoColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Log.d(tag, "decode extract get videoColorFormat =" + videoColorFormat);//color format reported by the decoder
}
initMediaEncode(videoColorFormat);//re-initialize the encoder from the detected color format (note: the initMediaEncode shown here takes a mime String, so an int overload would be needed)
break;
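To pick a color format the device's encoder actually accepts instead of hard-coding one, the codec capabilities can be queried up front. The following is only a sketch (class and method names are mine): it walks the installed codecs, looks at the matching encoder's advertised color formats, and prefers the semi-planar layout discussed above.
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
class ColorFormatProbe {
    //Return a color format the first matching encoder claims to support for the given mime type.
    static int pickEncoderColorFormat(String mime) {
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(mime)) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
                for (int format : caps.colorFormats) {
                    //prefer the NV12/NV21-style semi-planar layout this article works with
                    if (format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                        return format;
                    }
                }
                if (caps.colorFormats.length > 0) {
                    return caps.colorFormats[0];//otherwise take whatever the encoder lists first
                }
            }
        }
        return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;//fallback
    }
}
The result would then replace the hard-coded value in initMediaEncode(), e.g. format.setInteger(MediaFormat.KEY_COLOR_FORMAT, ColorFormatProbe.pickEncoderColorFormat("video/avc")).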
Full source:
import android.annotation.TargetApi;
import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaMuxer;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.os.Message;
import android.support.annotation.Nullable;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
/**
* Created by user on 2016/8/13.
*/
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class TestCodecService extends Service {
private MediaExtractor extractor;
private MediaMuxer muxer;
private final static String tag = "px";
private final String TAG = this.getClass().getSimpleName();
private MediaFormat format;
private int videoMaxInputSize = 0, videoRotation = 0;
private long videoDuration;
private boolean decodeOver = false, encoding = false, mCancel, mDelete;
//index of the video track within the stream
private int videoTrackIndex = -1;
private MediaCodec mediaDecode, mediaEncode;
private ByteBuffer[] decodeInputBuffers, decodeOutputBuffers;
private ArrayList<Frame> timeDataContainer;//container for the processed frames
private MediaCodec.BufferInfo decodeBufferInfo;
private int srcWidth, srcHeight, dstWidth, dstHeight;
private SimpleDateFormat videoTimeFormat;
private int mProgress, mMax;
private VideoCodecDao codecDao;
//paint used to draw the timestamp
private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
@Override
public void onCreate() {
super.onCreate();
JLog.d(TAG, "onCreate");
//display format of the video timestamp
videoTimeFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
timeDataContainer = new ArrayList<>();
//set up the paint
paint.setColor(Color.WHITE);
paint.setTextSize(20);
codecDao = VideoCodecDao.getInstance(JingRuiApp.getJRApplicationContext());
}
@Override
public void onDestroy() {
super.onDestroy();
JLog.d(TAG, "onDestroy");
decodeOver = true;
encoding = false;
}
private void init(String srcPath, String dstpath) {
MediaMetadataRetriever mmr = new MediaMetadataRetriever();
mmr.setDataSource(srcPath);
try {
srcWidth = Integer.parseInt(mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
srcHeight = Integer.parseInt(mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
}
try {
extractor = new MediaExtractor();
extractor.setDataSource(srcPath);
String mime = null;
for (int i = 0; i < extractor.getTrackCount(); i++) {
//get the detailed format/configuration of this track
MediaFormat format = extractor.getTrackFormat(i);
mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
videoTrackIndex = i;
this.format = format;
} else if (mime.startsWith("audio/")) {
continue;
} else {
continue;
}
}
extractor.selectTrack(videoTrackIndex); //select the video track for reading
//read the source track parameters (the muxer itself is created further down)
srcWidth = format.getInteger(MediaFormat.KEY_WIDTH);
srcHeight = format.getInteger(MediaFormat.KEY_HEIGHT);//presumably meant for srcHeight: dstWidth/dstHeight are derived from the rotation below
videoMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
videoDuration = format.getLong(MediaFormat.KEY_DURATION);
//videoRotation = format.getInteger(MediaFormat.KEY_ROTATION);
videoRotation = 90;//lower API levels can't read the rotation from the format, so it is hard-coded here
if (videoRotation == 90) {
dstWidth = srcHeight;
dstHeight = srcWidth;
} else if (videoRotation == 0) {
dstWidth = srcWidth;
dstHeight = srcHeight;
}
mMax = (int) (videoDuration / 1000);
//int bit = this.format.getInteger(MediaFormat.KEY_BIT_RATE);
JLog.d(tag, "videoWidth=" + srcWidth + ",videoHeight=" + srcHeight + ",videoMaxInputSize=" + videoMaxInputSize + ",videoDuration=" + videoDuration + ",videoRotation=" + videoRotation);
//the muxer that writes the output file
muxer = new MediaMuxer(dstpath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
//add the video track to the muxer
//videoTrackIndex = muxer.addTrack(format);
MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
videoInfo.presentationTimeUs = 0;
initMediaDecode(mime);
initMediaEncode(mime);
} catch (IOException e) {
e.printStackTrace();
}
}
//pull out each frame
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void extract() {
int inputIndex = mediaDecode.dequeueInputBuffer(-1);//get an available input buffer: -1 waits indefinitely, 0 returns immediately; -1 is recommended here to avoid dropping frames
if (inputIndex < 0) {
JLog.d("px", "=========== code over =======");
return;
}
ByteBuffer inputBuffer = decodeInputBuffers[inputIndex];//grab the input buffer
inputBuffer.clear();
int length = extractor.readSampleData(inputBuffer, 0); //read one sample and queue it for decoding
if (length < 0) {
JLog.d("px", "extract Over");
decodeOver = true;
return;
} else {
//get the timestamp
long presentationTimeUs = extractor.getSampleTime();
MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
videoInfo.offset = 0;
videoInfo.size = length;
//get the frame flags; this only tells us whether it is a sync (I) frame
videoInfo.flags = extractor.getSampleFlags();
videoInfo.presentationTimeUs = extractor.getSampleTime();
//decode the frame
decode(videoInfo, inputIndex);
extractor.advance(); //move on to the next sample
}
}
private void handleFrameData(byte[] data, MediaCodec.BufferInfo info) {
//YUV420sp -> RGB data, 5-60ms
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, srcWidth, srcHeight, null);
yuvImage.compressToJpeg(new Rect(0, 0, srcWidth, srcHeight), 100, out);
byte[] imageBytes = out.toByteArray();
//rotate the image, 20-50ms
Bitmap image = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
Bitmap bitmap = rotaingImageView(videoRotation, image);
image.recycle();
//draw the text, 0-1ms
Canvas canvas = new Canvas(bitmap);
canvas.drawText(videoTimeFormat.format(mVideo.videoCreateTime + info.presentationTimeUs / 1000), 10, 30, paint);
//report progress, 0-5ms
mProgress = (int) (info.presentationTimeUs / 1000);
if (mListener != null) {
mListener.onProgress(mProgress, mMax);
}
synchronized (MediaCodec.class) {//remember to lock
timeDataContainer.add(new Frame(info, bitmap));
}
}
public static byte[] getNV21(int width, int height, Bitmap scaled) {
int[] argb = new int[width * height];
scaled.getPixels(argb, 0, width, 0, 0, width, height);
byte[] yuv = new byte[width * height * 3 / 2];
encodeYUV420SP(yuv, argb, width, height);
scaled.recycle();
return yuv;
}
/**
* Convert the ARGB data obtained from a bitmap into YUV420sp.
* The resulting yuv420sp data can be handed straight to MediaCodec (i.e. to the AVC encoder) for encoding.
*
* @param yuv420sp buffer that receives the yuv420sp data
* @param argb     input argb data
* @param width    image width
* @param height   image height
*/
public static void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
// a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
}
index++;
}
}
}
/**
* Get a frame that already has the timestamp drawn on it
*
* @return
*/
private Frame getFrameData() {
synchronized (MediaCodec.class) {//remember to lock
if (timeDataContainer.isEmpty()) {
return null;
}
//take the next frame from the queue
Frame frame = timeDataContainer.remove(0);//removing it as we take it keeps the frames in order and releases memory promptly
//convert back to YUV420sp, 120-160ms
frame.data = getNV21(dstWidth, dstHeight, frame.bitmap);
return frame;
}
}
/*
* Rotate a bitmap
* @param angle
* @param bitmap
* @return Bitmap
*/
public Bitmap rotaingImageView(int angle, Bitmap bitmap) {
//apply the rotation
Matrix matrix = new Matrix();
matrix.postRotate(angle);
// create the new bitmap
return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
}
/**
* Initialize the decoder
*/
private void initMediaDecode(String mime) {
try {
//create the decoder
mediaDecode = MediaCodec.createDecoderByType(mime);
mediaDecode.configure(format, null, null, 0);
} catch (IOException e) {
e.printStackTrace();
}
if (mediaDecode == null) {
JLog.e(tag, "create mediaDecode failed");
return;
}
mediaDecode.start();
decodeInputBuffers = mediaDecode.getInputBuffers();
decodeOutputBuffers = mediaDecode.getOutputBuffers();
decodeBufferInfo = new MediaCodec.BufferInfo();//describes the byte[] data produced by the decoder
}
/**
* Initialize the encoder
*/
private void initMediaEncode(String mime) {
try {
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, dstWidth, dstHeight);
format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 512);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 27);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
// format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mediaEncode = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
mediaEncode.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IOException e) {
e.printStackTrace();
}
if (mediaEncode == null) {
JLog.e(tag, "create mediaEncode failed");
return;
}
mediaEncode.start();
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void decode(MediaCodec.BufferInfo videoInfo, int inputIndex) {
mediaDecode.queueInputBuffer(inputIndex, 0, videoInfo.size, videoInfo.presentationTimeUs, videoInfo.flags);//tell the decoder to decode the data we just queued
//get the decoded byte[] data; BufferInfo was described above; the timeout is in microseconds, and as before -1 waits forever while 0 returns immediately
//don't pass -1 here: sometimes there is no output at all, and the call would block forever
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputIndex = mediaDecode.dequeueOutputBuffer(bufferInfo, 50000);
switch (outputIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
JLog.d(tag, "INFO_OUTPUT_BUFFERS_CHANGED");
decodeOutputBuffers = mediaDecode.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
JLog.d(tag, "New format " + mediaDecode.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
JLog.d(tag, "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outputBuffer;
byte[] frame;
while (outputIndex >= 0) {//one dequeue doesn't always drain everything, so loop until the decoder has nothing left to give
outputBuffer = decodeOutputBuffers[outputIndex];//the buffer holding the decoded frame data
frame = new byte[bufferInfo.size];//BufferInfo carries the size of this chunk
outputBuffer.get(frame);//copy the buffer contents into a byte array
outputBuffer.clear();//clear the buffer after reading; MediaCodec reuses these buffers, and without clearing you would see the same data again next time
handleFrameData(frame, videoInfo);//our own method: hands the data to the encoder thread (shown below)
mediaDecode.releaseOutputBuffer(outputIndex, false);//must be called, otherwise MediaCodec runs out of buffers and stops producing output
outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, 50000);//fetch again; when there is no more output, outputIndex < 0 and the loop ends
}
break;
}
}
/**
* Encode
*/
private void encode() {
//fetch the data produced by the decoder thread
byte[] chunkTime;
Frame frame = getFrameData();
if (frame == null) {
return;
}
chunkTime = frame.data;
int inputIndex = mediaEncode.dequeueInputBuffer(-1);//same as for the decoder
if (inputIndex < 0) {
JLog.d("px", "dequeueInputBuffer return inputIndex " + inputIndex + ",then break");
mediaEncode.signalEndOfInputStream();
}
ByteBuffer inputBuffer = mediaEncode.getInputBuffers()[inputIndex];//same as for the decoder
inputBuffer.clear();//same as for the decoder
inputBuffer.put(chunkTime);//fill the input buffer with the frame data
inputBuffer.limit(frame.videoInfo.size);
mediaEncode.queueInputBuffer(inputIndex, 0, chunkTime.length, frame.videoInfo.presentationTimeUs, frame.videoInfo.flags);//tell the encoder to encode
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputIndex = mediaEncode.dequeueOutputBuffer(bufferInfo, 50000);//same as for the decoder
switch (outputIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
JLog.d(tag, "INFO_OUTPUT_BUFFERS_CHANGED");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat outputFormat = mediaEncode.getOutputFormat();
outputFormat.setInteger(MediaFormat.KEY_ROTATION, videoRotation);
JLog.d(tag, "mediaEncode find New format " + outputFormat);
//add the video track to the muxer
videoTrackIndex = muxer.addTrack(outputFormat);
muxer.start();
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
JLog.d(tag, "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outputBuffer;
while (outputIndex >= 0) {//same as for the decoder
outputBuffer = mediaEncode.getOutputBuffers()[outputIndex];//grab the output buffer
muxer.writeSampleData(videoTrackIndex, outputBuffer, bufferInfo);
// JLog.d("px", "writeSampleData:" + bufferInfo.size);
mediaEncode.releaseOutputBuffer(outputIndex, false);
outputIndex = mediaEncode.dequeueOutputBuffer(bufferInfo, 50000);
}
break;
}
}
private void release() {
//release the MediaMuxer and MediaExtractor once everything has been written
extractor.release();
mediaDecode.release();
mediaEncode.release();
muxer.stop();
muxer.release();
}
private DecodeRunnable decodeRunnable;
private EncodeRunnable encodeRunnable;
/**
* Decoder thread
*/
private class DecodeRunnable extends Thread {
@Override
public void run() {
decodeOver = false;
while (!decodeOver) {
try {
extract();
} catch (Exception e) {
//catch the exception caused by the file being deleted
JLog.e("px", e.toString());
}
synchronized (encodeRunnable) {
encodeRunnable.notify();
}
}
}
}
/**
* Encoder thread
*/
private class EncodeRunnable extends Thread {
@Override
public void run() {
encoding = true;
while (encoding) {
if (timeDataContainer.isEmpty()) {
if (decodeOver) {//decoding finished and the cache is empty
break;
}
try {
synchronized (encodeRunnable) {
wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
encode();
}
}
release();
encoding = false;
handler.sendEmptyMessage(-2);//signal that the task is done
}
}
android.os.Handler handler = new android.os.Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case -2:
onComplete();
break;
default:
break;
}
}
};
public void onComplete() {
if (mDelete) {//a delete request, which implies a cancel
mDelete = false;
new File(mVideo.srcPath).delete();//explicit delete: remove the source file and the database record
codecDao.deleteItem(mVideo);
JLog.d("px", "delete file " + mVideo.srcPath);
} else {
mVideo.finish = mCancel ? 0 : 100;
codecDao.createOrUpdate(mVideo);//update the database state to finished, or back to idle
}
if (mCancel) {//cancelled midway
mCancel = false;
new File(mVideo.dstPath).delete();//cancelled: delete the target file
JLog.d("px", "delete file " + mVideo.dstPath);
} else {//finished normally
new File(mVideo.srcPath).delete();//success: delete the source file
JLog.d("px", "delete file " + mVideo.srcPath);
}
if (mListener != null) {
mListener.onCodecFinish(mVideo);
}
if (!videos.isEmpty()) {
VideoCodecModel video = videos.remove(0);
start(video);
}
}
class Frame {
MediaCodec.BufferInfo videoInfo;
byte[] data;
Bitmap bitmap;
public Frame(MediaCodec.BufferInfo videoInfo, Bitmap bitmap) {
this.videoInfo = videoInfo;
this.bitmap = bitmap;
}
}
private long getInterval() {
//use the first two frames to get the frame interval
long videoSampleTime;
ByteBuffer buffer = ByteBuffer.allocate(1024 * 512);
//get the time interval between adjacent frames of the source video
extractor.readSampleData(buffer, 0);
//skip first I frame
if (extractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC)
extractor.advance();
extractor.readSampleData(buffer, 0);
long firstVideoPTS = extractor.getSampleTime();
extractor.advance();
extractor.readSampleData(buffer, 0);
long SecondVideoPTS = extractor.getSampleTime();
videoSampleTime = Math.abs(SecondVideoPTS - firstVideoPTS);
JLog.d(tag, "videoSampleTime is " + videoSampleTime);
return videoSampleTime;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
JLog.d(TAG, "onStartCommand");
super.onStartCommand(intent, flags, startId);
if (intent == null) {
return START_NOT_STICKY;
}
int action = intent.getIntExtra("action", 0);
if (action == REQUEST_CODEC) {
VideoCodecModel video = (VideoCodecModel) intent.getSerializableExtra("video");
video = codecDao.addItem(video);
if (!encoding) {
start(video);
} else {
videos.add(video);
}
} else if (action == REQUEST_CODEC_CANCEL) {
VideoCodecModel video = (VideoCodecModel) intent.getSerializableExtra("video");
mDelete = intent.getBooleanExtra("delete", false);//whether to delete the old file
JLog.d("px", "----- onStartCommand action " + action + " is delete?" + mDelete);
mBinder.cancel(video);
}
return START_NOT_STICKY;
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
JLog.d(TAG, "onBind");
return mBinder;
}
private CodecBinder mBinder = new CodecBinder();
private VideoCodecModel mVideo;
//queue of videos waiting to be processed
private List<VideoCodecModel> videos = new ArrayList<>();
public static final int REQUEST_CODEC = 0x183;
public static final int REQUEST_CODEC_CANCEL = 0x184;
public class CodecBinder extends Binder {
/**
* @param video
* @return true if the task starts right away, false if it has been queued
*/
public boolean start(VideoCodecModel video) {
video = codecDao.addItem(video);
if (!encoding) {
TestCodecService.this.start(video);
} else {
videos.add(video);
}
return !encoding;
}
public void setOnProgressChangeListener(OnProgressChangeListener l) {
mListener = l;
}
public VideoCodecModel getCurrentVideo() {
return mVideo;
}
public void cancel(VideoCodecModel video) {
if (mVideo.equals(video)) {//this video is currently being processed
decodeOver = true;//makes the decode thread stop
encoding = false;//makes the encode thread stop
mCancel = true;//triggers the file cleanup once it has stopped
} else {//this video is not the one being processed
boolean flag = videos.remove(video);
if (flag) {
JLog.d("px", "cancel render task success");
} else {
//no such task in the queue
JLog.d("px", "cancel render task failed; this video does not seem to be in the render queue");
}
//delete the source file
if (mDelete) {
mDelete = false;
new File(video.srcPath).delete();
codecDao.deleteItem(video);
}
}
}
public List<VideoCodecModel> getVideoList() {
return videos;
}
public void removeListener() {
mListener = null;
}
}
private void start(VideoCodecModel video) {
if (video == null) {
return;
}
if (!new File(video.srcPath).exists()) {
Toast.makeText(this, "The cached video file may have been deleted", Toast.LENGTH_SHORT).show();
video.finish = -100;
codecDao.createOrUpdate(video);
return;
}
mVideo = video;
if (mListener != null) {
mListener.onCodecStart(mVideo);
}
mVideo.finish = 50;//mark as processing
codecDao.createOrUpdate(mVideo);
Runnable runnable = new Runnable() {
@Override
public void run() {
init(mVideo.srcPath, mVideo.dstPath);
decodeRunnable = new DecodeRunnable();
decodeRunnable.start();
encodeRunnable = new EncodeRunnable();
encodeRunnable.start();
}
};
AsyncTaskExecutor.getExecutor().execute(runnable);
}
private OnProgressChangeListener mListener;
public interface OnProgressChangeListener {
void onProgress(int progress, int max);
void onCodecStart(VideoCodecModel video);
void onCodecFinish(VideoCodecModel video);
}
}
//The model class
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
import java.io.Serializable;
/**
* Created by user on 2016/8/29.
*/
@DatabaseTable(tableName = "video_codec_task")
public class VideoCodecModel implements Serializable {
private static final long serialVersionUID = -1307249622002520298L;
@DatabaseField
public String srcPath;
@DatabaseField
public String dstPath;
@DatabaseField
public long videoCreateTime;
@DatabaseField(generatedId = true)
public int id;
//0 = idle, 50 = rendering or queued, 100 = finished, -100 = deleted
@DatabaseField
public int finish = 0;
@DatabaseField
public String serno;
//only used during UI operations; not persisted to the database
public boolean select;
public VideoCodecModel(String srcPath, String dstPath, long videoCreateTime) {
this.srcPath = srcPath;
this.videoCreateTime = videoCreateTime;
this.dstPath = dstPath;
}
public VideoCodecModel() {
}
public String getSrcPath() {
return srcPath;
}
public void setSrcPath(String srcPath) {
this.srcPath = srcPath;
}
public String getDstPath() {
return dstPath;
}
public void setDstPath(String dstPath) {
this.dstPath = dstPath;
}
public long getVideoCreateTime() {
return videoCreateTime;
}
public void setVideoCreateTime(long videoCreateTime) {
this.videoCreateTime = videoCreateTime;
}
public boolean isSelect() {
return select;
}
public void setSelect(boolean select) {
this.select = select;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof VideoCodecModel)) return false;
VideoCodecModel that = (VideoCodecModel) o;
if (videoCreateTime != that.videoCreateTime) return false;
if (!srcPath.equals(that.srcPath)) return false;
return dstPath.equals(that.dstPath);
}
}
//The Activity that shows the status of watermark tasks and monitors the running Service; whether it is open or not does not affect the Service
import android.annotation.TargetApi;
import android.app.ProgressDialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Message;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.Gravity;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.PopupMenu;
import android.widget.ProgressBar;
import android.widget.TextView;
import ...
import java.io.File;
import java.lang.ref.WeakReference;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* Created by user on 2016/8/29.
*/
public class ShowCodecActivity extends BaseActivity implements TestCodecService.OnProgressChangeListener, View.OnClickListener {
private TextView noneTipsView;
private List<VideoCodecModel> videos = new ArrayList<>(), cordingVideos;
private ListView listView;
private BaseAdapter adapter;
private View firstTips;
@Nullable
VideoCodecModel curShowVideo, curRenderVideo;
TestCodecService.CodecBinder binder;
private ProgressBar progressBar;
ServiceConnection connection;
VideoCodecDao codecDao;
private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
private boolean mEditMode = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_show_codec);
setTitle("Contract video list");
initView();
if (getIntent() != null) {
curShowVideo = (VideoCodecModel) getIntent().getSerializableExtra("video");
}
codecDao = VideoCodecDao.getInstance(this);
final Intent intent = new Intent(this, TestCodecService.class);
connection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
Log.d("px", "onServiceConnected");
binder = (TestCodecService.CodecBinder) service;
binder.setOnProgressChangeListener(ShowCodecActivity.this);
videos.clear();
curRenderVideo = binder.getCurrentVideo();
cordingVideos = binder.getVideoList();
videos.addAll(codecDao.queryAll());
notifyChange();
}
@Override
public void onServiceDisconnected(ComponentName name) {
}
};
bindService(intent, connection, Context.BIND_AUTO_CREATE);
}
private void notifyChange() {
if (adapter == null) {
adapter = new BaseAdapter() {
@Override
public int getCount() {
return videos.size();
}
@Override
public VideoCodecModel getItem(int position) {
return videos.get(position);
}
@Override
public long getItemId(int position) {
return 0;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final Holder holder;
if (convertView == null) {
convertView = View.inflate(ShowCodecActivity.this, R.layout.item_show_codec, null);
holder = new Holder();
holder.bar = (ProgressBar) convertView.findViewById(R.id.pb_codec);
holder.status = (TextView) convertView.findViewById(R.id.status);
holder.serno = (TextView) convertView.findViewById(R.id.serno);
holder.select = convertView.findViewById(R.id.select);
holder.time = (TextView) convertView.findViewById(R.id.time);
holder.operate = (TextView) convertView.findViewById(R.id.operate);
holder.checkBox = (CheckBox) convertView.findViewById(R.id.cb_select);
convertView.setTag(holder);
} else {
holder = (Holder) convertView.getTag();
}
final VideoCodecModel video = getItem(position);
if (video.finish == 100) {
holder.status.setText("Done");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Actions");
} else if (video.finish == -100) {
holder.status.setText("Deleted");
holder.operate.setVisibility(View.INVISIBLE);
} else if (video.equals(curRenderVideo)) {
progressBar = holder.bar;
holder.status.setText("Processing");
holder.operate.setVisibility(View.INVISIBLE);
} else if (cordingVideos.contains(video)) {
holder.status.setText("Waiting");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Cancel");
} else {
holder.status.setText("Not processed");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Start");
}
holder.operate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (video.finish == 100) {
operate(holder.status, video);
} else if (video.finish == -100) {
return;
} else if (video.equals(curRenderVideo)) {//already being encoded; nothing to do
return;
} else if (cordingVideos.contains(video)) {//already in the encode queue; can be cancelled
binder.cancel(video);
holder.status.setText("Not processed");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Start");
} else {
boolean immedia = binder.start(video);
if (immedia) {
holder.status.setText("Processing");
holder.operate.setVisibility(View.INVISIBLE);
} else {
holder.status.setText("Waiting");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Cancel");
}
}
}
});
holder.select.setVisibility(video.equals(curShowVideo) ? View.VISIBLE : View.GONE);
holder.serno.setText(video.serno);
holder.time.setText(dateFormat.format(new Date(video.videoCreateTime)));
holder.checkBox.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
holder.checkBox.setChecked(video.isSelect());
holder.checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
video.setSelect(isChecked);
}
});
return convertView;
}
};
listView.setAdapter(adapter);
} else {
adapter.notifyDataSetChanged();
}
noneTipsView.setVisibility(videos.isEmpty() ? View.VISIBLE : View.GONE);
more.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
back.setVisibility(mEditMode ? View.INVISIBLE : View.VISIBLE);
checkBox.setVisibility(mEditMode ? View.VISIBLE : View.GONE);
}
class Holder {
ProgressBar bar;
TextView status, serno, time, operate;
View select;
CheckBox checkBox;
}
private void initView() {
listView = (ListView) findViewById(R.id.lv_codec);
noneTipsView = (TextView) findViewById(R.id.tv_none);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
VideoCodecModel video = videos.get(position);
operate(view, video);
}
});
listView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
@Override
public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
if (mEditMode) {
return false;
}
mEditMode = true;
//entering edit mode does not keep earlier selections
for (VideoCodecModel video : videos) {
if (video.select)
video.select = false;
}
checkBox.setChecked(false);
notifyChange();
return true;
}
});
firstTips = findViewById(R.id.ll_tips);
boolean visable = Preferences.getBoolean("firstShowCodec", true);
firstTips.setVisibility(visable ? View.VISIBLE : View.GONE);
if (visable)
findViewById(R.id.btn_noshow).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Preferences.put("firstShowCodec", false);
firstTips.setVisibility(View.GONE);
}
});
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
for (VideoCodecModel model : videos) {
model.setSelect(isChecked);
}
notifyChange();
}
});
more.setText("Actions");
more.setOnClickListener(this);
}
private void operate(View view, final VideoCodecModel video) {
if (video.finish != 100) {
return;
}
PopupMenu popupMenu = new PopupMenu(ShowCodecActivity.this, view);
popupMenu.getMenu().add(1, 0, 0, "Preview or send");
popupMenu.getMenu().add(1, 1, 1, "Delete");
popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId()) {
case 0:
previewVideo(video.dstPath);
break;
case 1:
File file = new File(video.dstPath);
if (file.exists()) {
file.delete();
}
codecDao.deleteItem(video);
videos.remove(video);
if (cordingVideos.contains(video)) {
binder.cancel(video);
}
notifyChange();
break;
}
return true;
}
});
popupMenu.show();
}
@Override
public void onProgress(int progress, int max) {
if (progressBar != null) {
progressBar.setMax(max);
progressBar.setProgress(progress);
}
}
@Override
public void onCodecStart(VideoCodecModel video) {
JLog.d("px", "onCodecStart");
curRenderVideo = video;
int index = videos.indexOf(video);
if (index >= 0) {
View child = listView.getChildAt(index);
Holder holder = (Holder) child.getTag();
holder.status.setText("Processing");
holder.operate.setVisibility(View.INVISIBLE);
progressBar = holder.bar;
}
}
@Override
public void onCodecFinish(VideoCodecModel video) {
JLog.d("px", "onCodecFinish");
if (progressBar != null) {
progressBar.setProgress(0);
}
int index = videos.indexOf(video);
if (index >= 0) {
videos.get(index).finish = 100;
View child = listView.getChildAt(index);
Holder holder = (Holder) child.getTag();
holder.status.setText("Done");
holder.operate.setVisibility(View.VISIBLE);
holder.operate.setText("Actions");
progressBar = null;
}
}
@Override
protected void onDestroy() {
if (binder != null)
binder.removeListener();
unbindService(connection);
super.onDestroy();
}
private void previewVideo(String filePath) {
//preview the recording
Intent intent = new Intent(Intent.ACTION_VIEW);
String type = "video/mp4";
Uri uri = Uri.parse("file://" + filePath);
intent.setDataAndType(uri, type);
startActivity(intent);
}
@Override
public void onBackPressed() {
if (mEditMode) {
mEditMode = false;
notifyChange();
return;
}
super.onBackPressed();
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.more:
PopupMenu menu = new PopupMenu(this, v);
// menu.getMenu().add(1, 0, 0, "Send");
menu.getMenu().add(1, 1, 1, "Delete");
menu.getMenu().add(1, 2, 2, "Cancel");
menu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId()) {
case 0:
break;
case 1:
deleteSelect();
break;
case 2:
mEditMode = false;
notifyChange();
break;
}
return true;
}
});
menu.show();
break;
}
}
//delete the selected items
private void deleteSelect() {
final ProgressDialog dialog = ProgressDialog.show(this, null, null);
AsyncTask<String, String, Boolean> task = new AsyncTask<String, String, Boolean>() {
@Override
protected Boolean doInBackground(String... params) {
boolean has = false;//whether anything deletable was actually selected; the user may not have selected anything
for (VideoCodecModel video : videos) {
if (video.select) {
File file;
if (video.finish == 100) {
file = new File(video.dstPath);
} else {
file = new File(video.srcPath);
}
if (file.exists()) {
file.delete();
}
codecDao.deleteItem(video);
if (!has) {
has = true;
}
}
}
if (has) {
videos.clear();
videos.addAll(codecDao.queryAll());
}
return has;
}
@Override
protected void onPostExecute(Boolean s) {
mEditMode = false;
notifyChange();
dialog.dismiss();
}
};
task.executeOnExecutor(AsyncTaskExecutor.getExecutor());
}
}
That's all for this article. I hope it helps with your own work, and thanks for supporting 腳本之家.