Building a Screen Recorder in Java
Another entry in my JavaCV "from getting started to getting buried" series. I stumbled on something fun to play with, video processing, so I put together a desktop screen recorder; the UI is displayed with JavaFX.
Dependencies
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv</artifactId>
    <version>1.5.6</version>
</dependency>
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>ffmpeg-platform</artifactId>
    <version>4.4-1.5.6</version>
</dependency>
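Before wiring everything into JavaFX, a minimal sketch along these lines can be used to verify that the dependencies resolve and that desktop capture works (gdigrab is a Windows-only FFmpeg input device; the class name GrabOneFrame and the output path capture.png are just placeholders for this test):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;

public class GrabOneFrame {
    public static void main(String[] args) throws Exception {
        // "desktop" + gdigrab captures the whole Windows desktop
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("desktop");
        grabber.setFormat("gdigrab");
        grabber.start();
        Frame frame = grabber.grabImage();                     // grab a single video frame
        BufferedImage image = new Java2DFrameConverter().convert(frame);
        ImageIO.write(image, "png", new File("capture.png"));  // dump it to a PNG for inspection
        grabber.stop();
        grabber.release();
    }
}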
Implementation
The idea: grab the desktop with FFmpegFrameGrabber (the gdigrab device), write the frames to a temporary AVI while showing a live preview in an ImageView, capture microphone audio to a temporary WAV in parallel, then merge the two files once recording stops.
package top.lingkang.test.gui;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.*;
import javax.sound.sampled.*;
import java.io.File;
import java.util.Timer;
import java.util.TimerTask;
public class DesktopRecording extends Application {
    private static final int frameRate = 10;
    private static boolean isStop = false;
    private static TargetDataLine line;
    private static File audioFile, videoFile;
    private static ImageView imageVideo;
    private static boolean isStart;

    @Override
    public void start(Stage primaryStage) throws Exception {
        primaryStage.setTitle("lingkang - Desktop Recorder emm...");
        imageVideo = new ImageView();
        imageVideo.setFitWidth(800);
        imageVideo.setFitHeight(600);
        Button button = new Button("Stop recording");
        button.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                isStop = true;
                Alert alert = new Alert(Alert.AlertType.INFORMATION);
                alert.setTitle("info");
                alert.setHeaderText("Recording stopped");
                alert.setOnCloseRequest(event1 -> alert.hide());
                alert.showAndWait();
            }
        });
        VBox box = new VBox();
        box.getChildren().addAll(button, imageVideo);
        primaryStage.setScene(new Scene(box));
        primaryStage.setHeight(600);
        primaryStage.setWidth(800);
        primaryStage.show();
        primaryStage.setOnCloseRequest(new EventHandler<WindowEvent>() {
            @Override
            public void handle(WindowEvent event) {
                isStop = true;
                System.exit(0);
            }
        });
        // gdigrab is FFmpeg's Windows desktop-capture device; "desktop" grabs the whole screen
        FrameGrabber grabber = new FFmpegFrameGrabber("desktop");
        grabber.setFormat("gdigrab");
        grabber.setFrameRate(frameRate);
        grabber.setImageHeight(600);
        grabber.setImageWidth(800);
        grabber.setOption("offset_y", "200"); // capture region starts 200 px from the top of the screen
        grabber.start();

        // record the captured frames into a temporary AVI file, encoded as H.264
        videoFile = File.createTempFile("lingkang", ".avi");
        FFmpegFrameRecorder recorder = FFmpegFrameRecorder.createDefault(videoFile, grabber.getImageWidth(), grabber.getImageHeight());
        recorder.setVideoOption("tune", "zerolatency");
        recorder.setFrameRate(frameRate);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.start();
        // grab and record a frame every 1000/frameRate ms, and push it to the live preview
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                if (isStop) {
                    try {
                        // stop the microphone line so the temporary WAV is flushed and closed
                        if (line != null) {
                            line.stop();
                            line.close();
                        }
                        recorder.stop();
                        grabber.stop();
                        recorder.release();
                        grabber.release();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    // merge the temporary video and audio files on a background thread
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                composeVideo();
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                        }
                    }).start();
                    timer.cancel();
                    return;
                }
                if (!isStart) {
                    isStart = true;
                    startAudio();
                }
                try {
                    Frame frame = grabber.grabFrame();
                    if (frame == null) {
                        return;
                    }
                    recorder.record(frame);
                    // scene-graph updates must run on the JavaFX application thread
                    Image convert = new JavaFXFrameConverter().convert(frame);
                    Platform.runLater(() -> imageVideo.setImage(convert));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }, 2000, 1000 / frameRate);
    }
    /**
     * Merge the temporary video file and the temporary audio file into the final output.
     */
    private void composeVideo() throws Exception {
        FFmpegFrameGrabber imageGrabber = new FFmpegFrameGrabber(videoFile);
        imageGrabber.start();
        FFmpegFrameGrabber audioGrabber = new FFmpegFrameGrabber(audioFile);
        audioGrabber.start();

        File file = new File("D://video666666.avi");
        if (file.exists())
            file.delete();
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(file, imageGrabber.getImageWidth(),
                imageGrabber.getImageHeight(), 1);
        recorder.setInterleaved(true);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.start(imageGrabber.getFormatContext());

        // copy the already-encoded video packets without re-encoding them
        long videoTime = imageGrabber.getLengthInTime();
        AVPacket packet = null;
        while ((packet = imageGrabber.grabPacket()) != null) {
            recorder.recordPacket(packet);
        }

        // re-encode the audio samples, stopping once the audio reaches the video's duration
        Frame sampleFrame = null;
        while ((sampleFrame = audioGrabber.grabSamples()) != null) {
            recorder.record(sampleFrame);
            if (audioGrabber.getTimestamp() >= videoTime) {
                break;
            }
        }

        recorder.close();
        audioGrabber.close();
        imageGrabber.close();
        System.out.println("Video merge complete!");
    }
    /**
     * Capture microphone audio on a background thread and stream it into a temporary WAV file.
     */
    private void startAudio() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    // 44.1 kHz, 16-bit, stereo, signed, little-endian
                    AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
                    DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
                    line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
                    line.open(audioFormat);
                    line.start();
                    audioFile = File.createTempFile(String.valueOf(System.currentTimeMillis()), ".wav");
                    // AudioSystem.write blocks here until the line is stopped and closed
                    AudioInputStream ais = new AudioInputStream(line);
                    AudioSystem.write(ais, AudioFileFormat.Type.WAVE, audioFile);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }
    public static void main(String[] args) throws Exception {
        launch(args);
    }
}
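A caveat: gdigrab only exists in the Windows builds of FFmpeg, so the code above is Windows-specific. Below is a rough sketch of how the grabber setup might be switched per platform; the helper name createScreenGrabber, the avfoundation screen index "1:", and the X11 display ":0.0" are assumptions that depend on the local machine.

import org.bytedeco.javacv.FFmpegFrameGrabber;

public class GrabberFactory {
    // Hypothetical platform switch for the screen-capture grabber; the device
    // names are assumptions and may need adjusting for a given machine.
    public static FFmpegFrameGrabber createScreenGrabber() {
        String os = System.getProperty("os.name").toLowerCase();
        FFmpegFrameGrabber grabber;
        if (os.contains("win")) {
            grabber = new FFmpegFrameGrabber("desktop"); // Windows GDI capture
            grabber.setFormat("gdigrab");
        } else if (os.contains("mac")) {
            grabber = new FFmpegFrameGrabber("1:");      // avfoundation screen index (assumed)
            grabber.setFormat("avfoundation");
        } else {
            grabber = new FFmpegFrameGrabber(":0.0");    // X11 display (assumed)
            grabber.setFormat("x11grab");
        }
        return grabber;
    }
}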