JavaCV使用ffmpeg實現(xiàn)錄屏功能
今天突發(fā)奇想,想自己寫一個錄屏的軟件,上次寫了一個專門錄音的Demo,但是要把聲音和視頻放到一起合成一個mp4文件,著實有一點艱難,所以就打算使用ffmpeg來寫一個,而這篇博客中會順便談一談我碰到的各種坑。
ffmpeg是一個c++程序,要想在java中使用ffmpeg,無非就是兩種方式:直接在java程序中調(diào)用ffmpeg.exe,還有就是通過jni的方式。而在這里我就是使用jni的方式,但是我在這里直接使用javacv這個框架來實現(xiàn)就可以,用這個的好處就是你什么都不要干,直接導(dǎo)入幾個重要的jar包就可以。
步驟
首先,下載 javacv 的發(fā)布包
你也可以直接在我這里下載
然后下載好了,就要開始導(dǎo)包了,導(dǎo)入javacpp,javacv-platform,javacv,這三個一定要導(dǎo),另外要能使用ffmpeg的API實現(xiàn)錄屏就要再導(dǎo)入ffmpeg和videoinput。
導(dǎo)完包之后代碼測試一下,這里發(fā)一個別人寫的代碼,可以實現(xiàn)錄屏;代碼中也帶有錄音邏輯,但 main 方法里 isHaveDevice 傳入的是 false,默認不錄音(改為 true 并配合可用的錄音設(shè)備可嘗試同時錄音),代碼里面需要修改一下存放文件的路徑:
實現(xiàn)代碼
package com;

import java.awt.AWTException;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Scanner;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameRecorder.Exception;
import org.bytedeco.javacv.Java2DFrameConverter;

/**
 * Screen recorder built on JavaCV / FFmpeg.
 *
 * <p>Captures the full screen with {@link Robot} at {@code frameRate} fps and encodes
 * it to an MP4 file via {@link FFmpegFrameRecorder}. When {@code isHaveDevice} is
 * {@code true}, PCM audio is captured from the default {@link TargetDataLine} and
 * muxed into the same file as AAC.
 *
 * <p>NOTE(review): this class is not thread-safe; {@code start}/{@code pause}/{@code stop}
 * are expected to be called from a single controlling thread.
 */
public class VideoRecord {

    /** Scheduler driving the periodic screen grabs. */
    private ScheduledThreadPoolExecutor screenTimer;

    /** Capture region — the whole primary screen. */
    private final Rectangle rectangle = new Rectangle(Constant.WIDTH, Constant.HEIGHT);

    /** FFmpeg muxer/encoder writing the output MP4. */
    private FFmpegFrameRecorder recorder;

    private Robot robot;

    /** Scheduler driving the periodic audio reads. */
    private ScheduledThreadPoolExecutor exec;

    private TargetDataLine line;
    private AudioFormat audioFormat;
    private DataLine.Info dataLineInfo;
    private boolean isHaveDevice = true;

    private long startTime = 0;
    private long videoTS = 0;
    private long pauseTime = 0;
    private double frameRate = 5;

    /**
     * Configures the recorder for {@code fileName}.mp4 (MPEG-4 video + AAC audio).
     *
     * @param fileName     output path without the ".mp4" extension
     * @param isHaveDevice whether an audio capture device is present and should be recorded
     */
    public VideoRecord(String fileName, boolean isHaveDevice) {
        recorder = new FFmpegFrameRecorder(fileName + ".mp4", Constant.WIDTH, Constant.HEIGHT);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); // 13
        recorder.setFormat("mp4");
        recorder.setSampleRate(44100);
        recorder.setFrameRate(frameRate);
        recorder.setVideoQuality(0);
        recorder.setVideoOption("crf", "23"); // reasonable bitrate range for 720P video
        recorder.setVideoBitrate(1000000);
        /*
         * Trade-off between quality and encode speed. Values: ultrafast, superfast,
         * veryfast, faster, fast, medium, slow, slower, veryslow.
         * ultrafast gives the least compression (low encoder CPU) and the largest
         * stream; veryslow gives the best compression (high encoder CPU) and the
         * smallest stream. See https://trac.ffmpeg.org/wiki/Encode/H.264
         */
        recorder.setVideoOption("preset", "slow");
        recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // yuv420p
        recorder.setAudioChannels(2);
        recorder.setAudioOption("crf", "0"); // highest quality
        recorder.setAudioQuality(0);
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        try {
            robot = new Robot();
        } catch (AWTException e) {
            e.printStackTrace();
        }
        try {
            recorder.start();
        } catch (Exception e) {
            // Do not swallow startup failures silently — without the recorder
            // every later record() call will fail too.
            System.err.println("Failed to start FFmpegFrameRecorder");
            e.printStackTrace();
        }
        this.isHaveDevice = isHaveDevice;
    }

    /**
     * Starts (or resumes) recording: spawns the audio capture thread when a
     * device is available, and schedules screen grabs at {@code frameRate} fps.
     */
    public void start() {
        if (startTime == 0) {
            startTime = System.currentTimeMillis();
        }
        if (pauseTime == 0) {
            pauseTime = System.currentTimeMillis();
        }
        // Launch the audio capture loop on its own thread if a device exists.
        if (isHaveDevice) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    caputre();
                }
            }).start();
        }
        // Periodic screen capture → convert → encode.
        screenTimer = new ScheduledThreadPoolExecutor(1);
        screenTimer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                BufferedImage screenCapture = robot.createScreenCapture(rectangle); // grab screen
                // Redraw the grab into a BGR image the converter handles efficiently.
                BufferedImage videoImg =
                        new BufferedImage(Constant.WIDTH, Constant.HEIGHT, BufferedImage.TYPE_3BYTE_BGR);
                Graphics2D videoGraphics = videoImg.createGraphics();
                videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING,
                        RenderingHints.VALUE_DITHER_DISABLE);
                videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
                        RenderingHints.VALUE_COLOR_RENDER_SPEED);
                videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING,
                        RenderingHints.VALUE_RENDER_SPEED);
                videoGraphics.drawImage(screenCapture, 0, 0, null);
                Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
                Frame frame = java2dConverter.convert(videoImg);
                try {
                    // Timestamp in microseconds, excluding time spent paused.
                    videoTS = 1000L
                            * (System.currentTimeMillis() - startTime
                                    - (System.currentTimeMillis() - pauseTime));
                    // Keep the recorder's clock monotonic.
                    if (videoTS > recorder.getTimestamp()) {
                        recorder.setTimestamp(videoTS);
                    }
                    recorder.record(frame); // encode the video frame
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // Help the GC: these buffers are large and produced 5x per second.
                videoGraphics.dispose();
                videoGraphics = null;
                videoImg.flush();
                videoImg = null;
                java2dConverter = null;
                screenCapture.flush();
                screenCapture = null;
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Captures audio from the default input line and feeds PCM samples to the
     * recorder. (Method name kept as {@code caputre} for source compatibility.)
     */
    public void caputre() {
        audioFormat = new AudioFormat(44100.0F, 16, 2, true, false); // 44.1 kHz, 16-bit, stereo, little-endian
        dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        try {
            line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            line.open(audioFormat);
        } catch (LineUnavailableException e1) {
            // No usable capture device — log and bail out instead of NPE-ing below.
            System.err.println("Audio capture line unavailable; recording video only.");
            e1.printStackTrace();
            return;
        }
        line.start();
        final int sampleRate = (int) audioFormat.getSampleRate();
        final int numChannels = audioFormat.getChannels();
        int audioBufferSize = sampleRate * numChannels;
        final byte[] audioBytes = new byte[audioBufferSize];
        exec = new ScheduledThreadPoolExecutor(1);
        exec.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    // Never request more bytes than the buffer can hold —
                    // line.available() may exceed audioBytes.length.
                    int toRead = Math.min(line.available(), audioBytes.length);
                    int nBytesRead = line.read(audioBytes, 0, toRead);
                    int nSamplesRead = nBytesRead / 2; // 16-bit samples = 2 bytes each
                    short[] samples = new short[nSamplesRead];
                    // Reinterpret the little-endian bytes as shorts for recordSamples.
                    ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN)
                            .asShortBuffer().get(samples);
                    ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                    recorder.recordSamples(sampleRate, numChannels, sBuff);
                } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Stops recording, finalizes the MP4 and releases all capture resources.
     * The instance cannot be restarted after this call.
     */
    public void stop() {
        if (null != screenTimer) {
            screenTimer.shutdownNow();
        }
        try {
            recorder.stop();
            recorder.release();
            recorder.close();
            screenTimer = null;
            if (isHaveDevice) {
                if (null != exec) {
                    exec.shutdownNow();
                }
                if (null != line) {
                    line.stop();
                    line.close();
                }
                dataLineInfo = null;
                audioFormat = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Pauses recording: cancels the capture schedulers and remembers the pause
     * instant so timestamps exclude the paused interval. Safe to call even if
     * {@link #start()} has not run yet.
     *
     * @throws Exception if the recorder rejects the pause
     */
    public void pause() throws Exception {
        if (null != screenTimer) {
            screenTimer.shutdownNow();
            screenTimer = null;
        }
        if (isHaveDevice) {
            if (null != exec) {
                exec.shutdownNow();
                exec = null;
            }
            if (null != line) {
                line.stop();
                line.close();
                line = null;
            }
            dataLineInfo = null;
            audioFormat = null;
        }
        pauseTime = System.currentTimeMillis();
    }

    /**
     * Console driver: type "stop", "pause" or "start".
     *
     * <p>Reads ONE token per loop iteration — the original read three tokens
     * per pass, so a single command was usually swallowed by the wrong branch.
     */
    public static void main(String[] args) throws Exception, AWTException {
        VideoRecord videoRecord = new VideoRecord("C:\\Users\\Administrator\\Desktop\\視頻", false);
        videoRecord.start();
        Scanner sc = new Scanner(System.in);
        while (true) {
            System.out.println("你要停止嗎?請輸入(stop),程序會停止。");
            String cmd = sc.next();
            if (cmd.equalsIgnoreCase("stop")) {
                videoRecord.stop();
                System.out.println("停止");
                break; // recorder is closed — nothing more to do
            } else if (cmd.equalsIgnoreCase("pause")) {
                videoRecord.pause();
                System.out.println("暫停");
            } else if (cmd.equalsIgnoreCase("start")) {
                videoRecord.start();
                System.out.println("開始");
            }
        }
        sc.close();
    }
}

/** Screen dimensions of the primary display. */
class Constant {
    public final static int WIDTH = Toolkit.getDefaultToolkit().getScreenSize().width;
    public final static int HEIGHT = Toolkit.getDefaultToolkit().getScreenSize().height;
}
到此這篇關(guān)于JavaCV使用ffmpeg實現(xiàn)錄屏功能的文章就介紹到這了,更多相關(guān)JavaCV ffmpeg錄屏內(nèi)容請搜索腳本之家以前的文章或繼續(xù)瀏覽下面的相關(guān)文章希望大家以后多多支持腳本之家!
相關(guān)文章
Springboot工具類FileCopyUtils使用教程
這篇文章主要介紹了Springboot內(nèi)置的工具類之FileCopyUtils的使用,文中通過示例代碼介紹的非常詳細,對大家的學(xué)習(xí)或者工作具有一定的參考學(xué)習(xí)價值,需要的朋友們下面隨著小編來一起學(xué)習(xí)吧2022-12-12