A couple of days ago I wanted to record a video, only to find that 录屏大师 (the screen recorder I had been using) had started charging, which was awkward. Since I have been learning Java for quite a while, I figured I would write a tool of my own for practice, and that is how the rather ugly little tool below came about: it handles basic screen recording plus local audio capture. I may let it evolve further when I get the chance; for now it is already good enough for me to record other people's videos. The code consists of two classes: a JavaFX front end (MainFrame) and a recording service (VideoRecode) built on the org.bytedeco JavaCV/FFmpeg bindings.
package com.piziwang.frame;
import com.piziwang.service.VideoRecode;
import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.image.ImageView;
import javafx.scene.input.*;
import javafx.scene.layout.*;
import javafx.scene.paint.Paint;
import javafx.stage.DirectoryChooser;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import java.awt.*;
import java.io.File;
public class MainFrame extends Application {
private Stage primary;
private Stage stage;
ImageView iv; // currently unused
HBox hBox; // selection rectangle drawn while dragging
VideoRecode videoRecord;
double screenX_start; // drag start point, in screen coordinates
double screenY_start;
double screenX_end; // drag end point, in screen coordinates
double screenY_end;
double sceneX = 100; // current drag position, updated while dragging
double sceneY = 100;
String path = "D://screenRecoding"; // default output location (can be changed via the directory chooser)
@Override
public void start(Stage primaryStage) throws Exception {
primary = primaryStage;
AnchorPane root = new AnchorPane();
Button bu = new Button("选择录屏区域");
bu.setPrefWidth(180);
Button begin_recode_btn = new Button("开始录屏");
begin_recode_btn.setPrefWidth(180);
begin_recode_btn.setOnMouseClicked(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
beginRecodeVideo();
}
});
Button stop_recode_btn = new Button("结束录屏");
stop_recode_btn.setPrefWidth(180);
stop_recode_btn.setOnMouseClicked(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
stopRecodeVideo();
}
});
Button open_dir_btn = new Button("打开存放位置");
open_dir_btn.setPrefWidth(180);
open_dir_btn.setOnMouseClicked(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
try {
Desktop.getDesktop().open(new File(path)); // open the output folder in the system file manager
} catch (Exception e) {
e.printStackTrace();
}
}
});
Button choose_dir_btn = new Button("选择存储位置");
choose_dir_btn.setPrefWidth(180);
final Label choose_dir_label = new Label(path);
choose_dir_label.setPrefWidth(500);
choose_dir_label.setPrefHeight(50);
choose_dir_label.setBorder(new Border(new BorderStroke( Paint.valueOf("#292929"),BorderStrokeStyle.SOLID,new CornerRadii(5.0),new BorderWidths(2))));
choose_dir_btn.setOnMouseClicked(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
DirectoryChooser directoryChooser = new DirectoryChooser();
File file = directoryChooser.showDialog(stage);
if (file != null) {
path = file.getPath(); // path of the selected folder; keep the previous path if the dialog was cancelled
}
choose_dir_label.setText(" " + path);
System.out.println(path);
}
});
root.getChildren().add(bu);
AnchorPane.setTopAnchor(bu,100.0);
AnchorPane.setLeftAnchor(bu,50.0);
root.getChildren().add(choose_dir_btn);
AnchorPane.setTopAnchor(choose_dir_btn,200.0);
AnchorPane.setLeftAnchor(choose_dir_btn,50.0);
root.getChildren().add(choose_dir_label);
AnchorPane.setTopAnchor(choose_dir_label,200.0);
AnchorPane.setLeftAnchor(choose_dir_label,250.0);
root.getChildren().add(begin_recode_btn);
AnchorPane.setTopAnchor(begin_recode_btn,300.0);
AnchorPane.setLeftAnchor(begin_recode_btn,50.0);
root.getChildren().add(stop_recode_btn);
AnchorPane.setTopAnchor(stop_recode_btn,300.0);
AnchorPane.setLeftAnchor(stop_recode_btn,250.0);
root.getChildren().add(open_dir_btn);
AnchorPane.setTopAnchor(open_dir_btn,400.0);
AnchorPane.setLeftAnchor(open_dir_btn,50.0);
Scene scene = new Scene(root);
primaryStage.setTitle("java录屏工具");
primaryStage.setScene(scene);
primaryStage.setWidth(800);
primaryStage.setHeight(1000);
primaryStage.show();
bu.setOnAction(new EventHandler<ActionEvent>() {
public void handle(ActionEvent event) {
show();
}
});
//keyboard shortcut: Ctrl+Alt+P fires the area-selection button
KeyCombination key = KeyCombination.valueOf("ctrl+alt+p");
Mnemonic mc = new Mnemonic(bu, key);
scene.addMnemonic(mc);
}
/**
 * Show a transparent full-screen stage on which the user drags out the capture area.
 * Pressing ESC cancels and returns to the main window.
 */
public void show(){
primary.setIconified(true); // minimize the main window while the capture area is selected
stage = new Stage();
AnchorPane root = new AnchorPane();
root.setBackground(Background.EMPTY);
Scene scene = new Scene(root);
scene.setFill(Paint.valueOf("#ffffee11"));
stage.setFullScreenExitHint("");
stage.initStyle(StageStyle.TRANSPARENT);
stage.setScene(scene);
stage.setFullScreen(true);
stage.show();
drag(root);
scene.setOnKeyPressed(new EventHandler<KeyEvent>() {
public void handle(KeyEvent event) {
if(event.getCode()== KeyCode.ESCAPE){
stage.close();
primary.setIconified(false);
}
}
});
}
/**
 * Let the user drag out a bordered rectangle on the transparent overlay to mark the capture area.
 */
public void drag(final AnchorPane an){
// label that shows the current selection size while dragging (created once and reused)
final Label sizeLabel = new Label();
sizeLabel.setAlignment(Pos.CENTER);
sizeLabel.setPrefWidth(320);
sizeLabel.setPrefHeight(40);
sizeLabel.setTextFill(Paint.valueOf("#CD3700"));
sizeLabel.setStyle("-fx-background-color: dimgrey");
an.setOnMousePressed(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
an.getChildren().clear();
hBox = new HBox();
hBox.setBackground(null);
hBox.setBorder(new Border(new BorderStroke( Paint.valueOf("#CD3700"),BorderStrokeStyle.SOLID,null,new BorderWidths(2))));
screenX_start = event.getScreenX();
screenY_start = event.getScreenY();
an.getChildren().add(hBox);
AnchorPane.setTopAnchor(hBox,screenY_start);
AnchorPane.setLeftAnchor(hBox,screenX_start);
System.out.println(event.getScreenX());
}
});
an.setOnDragDetected(new EventHandler<MouseEvent>() {
public void handle(MouseEvent event) {
an.startFullDrag();
}
});
an.setOnMouseDragOver(new EventHandler<MouseDragEvent>() {
public void handle(MouseDragEvent event) {
// add the size label once, just above the selection rectangle
if (!an.getChildren().contains(sizeLabel)) {
an.getChildren().add(sizeLabel);
AnchorPane.setTopAnchor(sizeLabel,screenY_start-40);
AnchorPane.setLeftAnchor(sizeLabel,screenX_start);
}
sceneX = event.getScreenX();
sceneY = event.getScreenY();
// the selection grows down and to the right of the start point
double width = sceneX-screenX_start;
double height = sceneY-screenY_start;
hBox.setPrefWidth(width);
hBox.setPrefHeight(height);
sizeLabel.setText("宽度:"+width+" 高度:"+height);
}
});
an.setOnMouseDragExited(new EventHandler<MouseDragEvent>() {
public void handle(MouseDragEvent event) {
screenX_end = event.getScreenX();
screenY_end = event.getScreenY();
Button complete_btn = new Button("确认");
hBox.getChildren().add(complete_btn);
hBox.setAlignment(Pos.BOTTOM_RIGHT);
complete_btn.setOnAction(new EventHandler<ActionEvent>() {
public void handle(ActionEvent event) {
//return to the main window
stage.close();
primary.setIconified(false);
}
});
}
});
}
public void beginRecodeVideo(){
if(videoRecord!=null){videoRecord.stop();}
int width = (int) (sceneX-screenX_start);
int height = (int)(sceneY-screenY_start);
Rectangle rectangle = new Rectangle((int)screenX_start,(int)screenY_start,width, height); // capture area chosen by dragging
//minimize the main window so it does not end up in the recording
primary.setIconified(true);
//start the JavaCV recorder; each recording gets a timestamped file inside the chosen directory
new File(path).mkdirs();
videoRecord = new VideoRecode(path + File.separator + "record_" + System.currentTimeMillis(), true, rectangle);
videoRecord.start();
}
public void stopRecodeVideo(){
if(videoRecord!=null){ // avoid a NullPointerException if recording was never started
videoRecord.stop();
videoRecord = null;
}
}
public static void main(String[] args) {
launch(args);
}
}
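One thing worth pointing out about the selection logic: the capture rectangle is computed as end point minus start point, so it only works when you drag from the top-left towards the bottom-right. If you want the selection to work in any direction, something like the following helper would do it (toCaptureRect is just a name I made up for illustration; it is not part of the tool above):

import java.awt.Rectangle;

public class CaptureRectUtil {
    /**
     * Build a capture rectangle from two arbitrary drag corners,
     * regardless of the direction the user dragged in.
     */
    public static Rectangle toCaptureRect(double x1, double y1, double x2, double y2) {
        int x = (int) Math.min(x1, x2);
        int y = (int) Math.min(y1, y2);
        int width = (int) Math.abs(x2 - x1);
        int height = (int) Math.abs(y2 - y1);
        return new Rectangle(x, y, width, height);
    }
}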
package com.piziwang.service;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameRecorder.Exception;
import org.bytedeco.javacv.Java2DFrameConverter;
import javax.sound.sampled.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Screen (and audio) recording based on JavaCV / FFmpeg.
 */
public class VideoRecode {
// scheduler that grabs screen frames at a fixed rate
private ScheduledThreadPoolExecutor screenTimer;
// default capture size (currently unused; the actual size comes from the Rectangle passed in)
private static final int WIDTH = 900;
private static final int HEIGHT = 600;
private Rectangle rectangle; // area of the screen to capture
// FFmpeg-based recorder that muxes video and audio into the output file
private FFmpegFrameRecorder recorder;
private Robot robot; // used for taking the screenshots
// scheduler that grabs audio samples at a fixed rate
private ScheduledThreadPoolExecutor exec;
private TargetDataLine line; // audio input line (microphone)
private AudioFormat audioFormat;
private DataLine.Info dataLineInfo;
private boolean isHaveDevice = true; // whether an audio input device is available
private long startTime = 0;
private long videoTS = 0;
private long pauseTime = 0;
private double frameRate = 24;
public VideoRecode(String fileName, boolean isHaveDevice,Rectangle rectangle1) {
rectangle = rectangle1;
System.out.println(rectangle1.getWidth());
// the recorder writes an mp4 file whose frame size matches the capture rectangle
recorder = new FFmpegFrameRecorder(fileName + ".mp4", (int)rectangle1.getWidth(), (int)rectangle1.getHeight());
//recorder.setVideoCodec(avcodec.AV_CODEC_ID_H265); // 28
// recorder.setVideoCodec(avcodec.AV_CODEC_ID_FLV1); // 28
recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); // 13
recorder.setFormat("mp4");
// recorder.setFormat("mov,mp4,m4a,3gp,3g2,mj2,h264,ogg,MPEG4");
recorder.setSampleRate(44100);
recorder.setFrameRate(frameRate);
recorder.setVideoQuality(0);
recorder.setVideoOption("crf", "23");
// 1000000 bit/s (about 1000 kb/s); around 2000 kb/s is a reasonable bitrate for 720p video
recorder.setVideoBitrate(1000000);
/**
 * The "preset" option trades video quality against encoding speed. Values, from fastest to
 * slowest: ultrafast, superfast, veryfast, faster, fast, medium, slow, slower, veryslow.
 * ultrafast gives the least compression (low encoder CPU) and the largest stream, while
 * veryslow gives the best compression (high encoder CPU) and the smallest stream.
 * Reference: https://trac.ffmpeg.org/wiki/Encode/H.264 -- "-preset ultrafast as the name
 * implies provides for the fastest possible encoding. If some tradeoff between quality and
 * encode speed [is acceptable], go for the speed. This might be needed if you are going to
 * be transcoding multiple streams on one machine."
 */
recorder.setVideoOption("preset", "slow");
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // yuv420p
recorder.setAudioChannels(2);
recorder.setAudioOption("crf", "0");
// Highest quality
recorder.setAudioQuality(0);
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
try {
robot = new Robot(); // Robot takes the actual screenshots
} catch (AWTException e) {
e.printStackTrace();
}
try {
recorder.start();
} catch (Exception e) {
e.printStackTrace();
}
this.isHaveDevice = isHaveDevice;
}
/**
 * Start recording: launch the audio-capture thread if a recording device is available,
 * then schedule the screen-capture task at the configured frame rate.
 */
public void start() {
if (startTime == 0) {
startTime = System.currentTimeMillis();
}
if (pauseTime == 0) {
pauseTime = System.currentTimeMillis();
}
// if an audio input device is available, start the audio-capture thread
if (isHaveDevice) {
new Thread(new Runnable() {
public void run() {
capture();
}
}).start();
}
// screen-capture loop
screenTimer = new ScheduledThreadPoolExecutor(1);
screenTimer.scheduleAtFixedRate(new Runnable() {
public void run() {
BufferedImage screenCapture = robot.createScreenCapture(rectangle); // take the screenshot
BufferedImage videoImg = new BufferedImage((int)rectangle.getWidth(), (int)rectangle.getHeight(),
BufferedImage.TYPE_3BYTE_BGR); // BufferedImage the screenshot is redrawn onto, in a format JavaCV can convert
Graphics2D videoGraphics = videoImg.createGraphics();
videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
RenderingHints.VALUE_COLOR_RENDER_SPEED);
videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
videoGraphics.drawImage(screenCapture, 0, 0, null); // redraw the screenshot
Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
Frame frame = java2dConverter.convert(videoImg);
try {
videoTS = 1000L
* (System.currentTimeMillis() - startTime - (System.currentTimeMillis() - pauseTime));
// check the timestamp offset
if (videoTS > recorder.getTimestamp()) {
recorder.setTimestamp(videoTS);
}
recorder.record(frame); // write the video frame
} catch (Exception e) {
e.printStackTrace();
}
// release resources
videoGraphics.dispose();
videoGraphics = null;
videoImg.flush();
videoImg = null;
java2dConverter = null;
screenCapture.flush();
screenCapture = null;
}
}, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
}
/**
 * Capture audio from the default recording device and feed the samples to the recorder.
 */
public void capture() {
audioFormat = new AudioFormat(44100.0F, 16, 2, true, false); // 44.1 kHz, 16-bit, stereo, signed, little-endian
dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
try {
line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
} catch (LineUnavailableException e1) {
e1.printStackTrace();
}
try {
line.open(audioFormat);
} catch (LineUnavailableException e1) {
e1.printStackTrace();
}
line.start();
final int sampleRate = (int) audioFormat.getSampleRate();
final int numChannels = audioFormat.getChannels();
// enough room for half a second of 16-bit stereo audio
int audioBufferSize = sampleRate * numChannels;
final byte[] audioBytes = new byte[audioBufferSize];
exec = new ScheduledThreadPoolExecutor(1);
exec.scheduleAtFixedRate(new Runnable() {
public void run() {
try {
// read whatever is available, but never more than the buffer can hold
int nBytesRead = line.read(audioBytes, 0, Math.min(line.available(), audioBytes.length));
int nSamplesRead = nBytesRead / 2;
short[] samples = new short[nSamplesRead];
// Let's wrap our short[] into a ShortBuffer and
// pass it to recordSamples
ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
// recorder is instance of
// org.bytedeco.javacv.FFmpegFrameRecorder
recorder.recordSamples(sampleRate, numChannels, sBuff);
// System.gc();
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
e.printStackTrace();
}
}
}, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
}
/**
 * Stop recording and release all resources.
 */
public void stop() {
if (null != screenTimer) {
screenTimer.shutdownNow();
screenTimer = null;
}
// stop the audio capture before closing the recorder, so no more samples are written to it
if (isHaveDevice) {
if (null != exec) {
exec.shutdownNow();
}
if (null != line) {
line.stop();
line.close();
}
dataLineInfo = null;
audioFormat = null;
}
try {
recorder.stop();
recorder.release();
recorder.close();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Pause recording: stop the capture schedulers and the audio line, and remember when the pause started.
 *
 * @throws Exception
 */
public void pause() throws Exception {
if (null != screenTimer) {
screenTimer.shutdownNow();
screenTimer = null;
}
if (isHaveDevice) {
if (null != exec) {
exec.shutdownNow();
exec = null;
}
if (null != line) {
line.stop();
line.close();
line = null;
}
dataLineInfo = null;
audioFormat = null;
}
pauseTime = System.currentTimeMillis();
}
}
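If you only want the recording service and not the JavaFX front end, VideoRecode can also be driven directly. Here is a minimal sketch (the output path and the ten-second duration are placeholders I picked for the example; the recorder appends ".mp4" to the file name, and the target folder has to exist):

import com.piziwang.service.VideoRecode;
import java.awt.Rectangle;

public class RecodeDemo {
    public static void main(String[] args) throws InterruptedException {
        // capture a 900x600 area starting at the top-left corner of the screen
        Rectangle area = new Rectangle(0, 0, 900, 600);
        // true = also capture audio from the default input device
        VideoRecode recorder = new VideoRecode("D:/screenRecoding/demo", true, area);
        recorder.start();
        Thread.sleep(10_000); // record for ten seconds
        recorder.stop();
    }
}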