<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.6</version>
</dependency>
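The javacv-platform artifact bundles the native FFmpeg binaries for the common platforms, so no separate FFmpeg installation is required. As a quick sanity check that the natives load correctly, a minimal probe along the following lines can be used (the class name and file path are illustrative placeholders, not part of the original code):
import org.bytedeco.javacv.FFmpegFrameGrabber;
public class JavaCvSetupCheck {
public static void main(String[] args) throws Exception {
//Placeholder path; point this at any local video file
FFmpegFrameGrabber probe = new FFmpegFrameGrabber("/tmp/sample.mp4");
probe.start();
System.out.println("frames=" + probe.getLengthInFrames() + ", fps=" + probe.getFrameRate());
probe.close();
}
}
The utility class below builds on the same grabber API, pairing it with FFmpegFrameRecorder to re-encode the tagged frames.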
import org.apache.commons.lang3.StringUtils;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.DigestUtils;
import javax.annotation.Resource;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
public class FfmpegUtil {
private static final Logger logger = LoggerFactory.getLogger(FfmpegUtil.class);
public String generateTagVideo(String srcVideoPath, int frameRate, String extName, String imageUrl, String srcVideoParamUrl) {
return doGenerateTagVideo(srcVideoPath, frameRate, extName, imageUrl, srcVideoParamUrl);
}
private String doGenerateTagVideo(String srcVideoPath, int frameRate, String extName, String imageUrl, String srcVideoParamUrl) {
Map<Integer, List<Frame>> frameMap = new ConcurrentHashMap<>(16);
Map<Integer, CompletableFuture<List<Frame>>> frameFeatureMap = new ConcurrentHashMap<>(16);
try {
//Path of the source video file
File srcVideoFile = new File(srcVideoPath);
if (!srcVideoFile.exists() || !srcVideoFile.isFile()) {
logger.info("generateTagVideo srcVideoPath is not video file {}", srcVideoPath);
return "";
}
String fileName = DigestUtils.md5DigestAsHex((srcVideoParamUrl + "_" + imageUrl).getBytes(StandardCharsets.UTF_8));
String targetVideoPath = srcVideoFile.getParentFile() + File.separator + fileName + "_tag." + extName;
File targetVideoFile = new File(targetVideoPath);
if (targetVideoFile.exists()) {
logger.info("generateTagVideo video existed ,file path {}", targetVideoPath);
return targetVideoFile.getAbsolutePath();
}
FFmpegFrameGrabber grabber = initGrabber(srcVideoPath);
assert grabber != null;
int width = grabber.getImageWidth();
int height = grabber.getImageHeight();
double videoFrameRate = grabber.getFrameRate();
int frameLength = grabber.getLengthInFrames();
int cpuCount = Runtime.getRuntime().availableProcessors();
//Run structured detection on the reference image; detectService and DetectFeatures are project-specific dependencies not shown in this excerpt
List<DetectFeatures> aiDetectResult = detectService.aiDetectAll(imageUrl, false, false);
//User face feature value, later used to compute the tagging position; can be ignored here
String userFaceFeature = "user face feature value";
//User body feature value, later used to compute the tagging position; can be ignored here
String userBodyFeature = "user body feature value";
FFmpegFrameRecorder recorder = initRecorder(targetVideoPath, extName, videoFrameRate, width, height);
assert recorder != null;
int mapIndex = 0;
frameRate = Math.max(Math.min(5, frameRate), (int) (grabber.getFrameRate() / 2));
Frame frame;
File imgRoot = new File(targetVideoFile.getParentFile(), System.currentTimeMillis() + "_img");
imgRoot.mkdirs();
AtomicInteger index = new AtomicInteger(0);
while ((frame = grabber.grabImage()) != null) {
if (frame.image == null) {
break;
}
Frame frameTemp = frame.clone();
List<Frame> frames = frameMap.getOrDefault(mapIndex, new ArrayList<>(frameRate));
if (frames.size() < frameRate) {
frames.add(frameTemp);
} else {
frames = frameMap.getOrDefault(++mapIndex, new ArrayList<>(frameRate));
frames.add(frameTemp);
}
frameMap.put(mapIndex, frames);
if (frames.size() == frameRate) {
CompletableFuture<List<Frame>> future = asyncConvertFrame(imgRoot, userFaceFeature, userBodyFeature, frames);
frameFeatureMap.put(mapIndex, future);
}
if (frameFeatureMap.size() == 2 * cpuCount) {
recordFrameMap(recorder, frameFeatureMap, index);
frameMap.clear();
mapIndex = 0;
}
}
if (!CollectionUtils.isEmpty(frameMap)) {
List<Frame> frames = frameMap.getOrDefault(frameMap.size() - 1, new ArrayList<>());
CompletableFuture<List<Frame>> future = asyncConvertFrame(imgRoot, userFaceFeature, userBodyFeature, frames);
frameFeatureMap.put(mapIndex, future);
recordFrameMap(recorder, frameFeatureMap, index);
frameMap.clear();
}
logger.info("index length {}, frame length {}", index.get(), frameLength);
//Delete the temporary image files
deleteFile(imgRoot);
grabber.close();
recorder.close();
return targetVideoFile.getAbsolutePath();
} catch (Exception e) {
logger.error("exception ", e);
}
return "";
}
private FFmpegFrameGrabber initGrabber(String srcVideoPath) {
try {
FFmpegFrameGrabber grabber = FFmpegFrameGrabber.createDefault(new File(srcVideoPath));
grabber.setFormat(StringUtils.substringAfterLast(srcVideoPath, ".").toLowerCase());
grabber.start();
return grabber;
} catch (Exception e) {
logger.error("initGrabber error", e);
}
return null;
}
private FFmpegFrameRecorder initRecorder(String targetVideoFile, String extName, double videoFrameRate, int width, int height) {
try {
FFmpegFrameRecorder recorder = FFmpegFrameRecorder.createDefault(targetVideoFile, width, height);
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setVideoQuality(25);
recorder.setFormat(extName.toLowerCase());
recorder.setFrameRate(videoFrameRate);
recorder.setVideoBitrate(1000000);//The higher the bitrate, the more storage the output takes
recorder.setVideoOption("preset", "ultrafast");//Encoding presets from fastest to slower: ultrafast, veryfast, fast, medium
recorder.start();
return recorder;
} catch (Exception e) {
logger.error("initRecorder error ", e);
}
return null;
}
private void recordFrameMap(FFmpegFrameRecorder recorder, Map<Integer, CompletableFuture<List<Frame>>> frameFeatureMap, AtomicInteger index) {
for (int i = 0; i < frameFeatureMap.size(); i++) {
CompletableFuture<List<Frame>> future = frameFeatureMap.get(i);
if (future != null) {
try {
List<Frame> frameList = future.get();
index.getAndAdd(frameList.size());
logger.info("i = {} ,index length {}", i, index.get());
recordFrame(recorder, frameList);
} catch (Exception e) {
logger.error("recordFrameMap ", e);
}
}
}
frameFeatureMap.clear();
}
private void recordFrame(FFmpegFrameRecorder recorder, List<Frame> frameList) {
try {
if (frameList.size() > 0) {
for (Frame frame : frameList) {
recorder.record(frame);
}
frameList.clear();
}
} catch (Exception e) {
logger.error("recordFrame error ", e);
}
}
private CompletableFuture<List<Frame>> asyncConvertFrame(File imgRoot, String userFaceFeature, String userBodyFeature, List<Frame> frames) {
return CompletableFuture.supplyAsync(() -> convertFrame(imgRoot, userFaceFeature, userBodyFeature, frames));
}
private List<Frame> convertFrame(File imgRoot, String userFaceFeature, String userBodyFeature, List<Frame> frames) {
List<Frame> framesList = new ArrayList<>(frames.size());
logger.info("asyncConvertFrame frameSize {}", frames.size());
try {
if (!CollectionUtils.isEmpty(frames)) {
int x = 0, y = 0, width = 0, height = 0;
Map<String, Integer> location = null;
Java2DFrameConverter converter;
BasicStroke basicStroke = new BasicStroke(5);
for (int i = 0; i < frames.size(); i++) {
Frame frame = frames.get(i);
Frame frameTemp = frame.clone();
if (i == 0) {
converter = new Java2DFrameConverter();
BufferedImage image = converter.getBufferedImage(frameTemp);
location = rpcFindLocation(image, imgRoot, userFaceFeature, userBodyFeature);
if (!CollectionUtils.isEmpty(location)) {
x = location.get("x");
y = location.get("y");
width = location.get("w");
height = location.get("h");
Graphics2D graphics = (Graphics2D) image.getGraphics();
graphics.setStroke(basicStroke);
graphics.setColor(Color.YELLOW);
graphics.drawRect(x, y, width, height);
Frame drawFrame = converter.convert(image);
framesList.add(drawFrame);
} else {
framesList.add(frameTemp);
}
} else {
if (!CollectionUtils.isEmpty(location)) {
converter = new Java2DFrameConverter();
BufferedImage image = converter.getBufferedImage(frameTemp);
Graphics2D graphics = (Graphics2D) image.getGraphics();
graphics.setStroke(basicStroke);
graphics.setColor(Color.YELLOW);
graphics.drawRect(x, y, width, height);
Frame drawFrame = converter.convert(image);
framesList.add(drawFrame);
} else {
framesList.add(frameTemp);
}
}
}
}
} catch (Exception e) {
logger.error("asyncConvertFrame error ", e);
}
frames.clear();
return framesList;
}
/**
* Makes an RPC call for the image file, mainly to compute the coordinates where the tag should be drawn.
*
* @param image the image file to analyze
* @return x, y, w, h
*/
private Map<String, Integer> rpcFindLocation(BufferedImage image, File targetFile, String userFaceFeature, String userBodyFeature) {
Map<String, Integer> location = new HashMap<>(8);
try {
File imageFile = new File(targetFile, System.currentTimeMillis() + ".jpg");
try (OutputStream outputStream = Files.newOutputStream(imageFile.toPath())) {
ImageIO.setUseCache(true);
ImageIO.write(image, "jpg", outputStream);
}
//TODO: compute which position needs to be tagged
location.put("x", 0);
location.put("y", 100);
location.put("w", 100);
location.put("h", 100);
} catch (Exception ex) {
logger.error("rpcFindLocation error ", ex);
}
return location;
}
/**
* Recursively deletes the temporary image directory. The original helper is not shown in
* the source, so this is a minimal stand-in implementation.
*/
private void deleteFile(File file) {
if (file == null || !file.exists()) {
return;
}
File[] children = file.listFiles();
if (children != null) {
for (File child : children) {
deleteFile(child);
}
}
if (!file.delete()) {
logger.warn("deleteFile failed to delete {}", file.getAbsolutePath());
}
}
}
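For context, here is a minimal sketch of how the utility might be invoked. The caller class, paths, URLs and parameter values are illustrative assumptions only; in a Spring application the class would normally be registered as a bean and injected rather than instantiated directly, so that its detection service is available.
public class FfmpegUtilDemo {
public static void main(String[] args) {
FfmpegUtil ffmpegUtil = new FfmpegUtil();
//All paths, URLs and the frame-rate value below are placeholders
String taggedVideoPath = ffmpegUtil.generateTagVideo(
"/data/videos/source.mp4", //local path of the source video
5, //frames per detection batch
"mp4", //target container / file extension
"https://example.com/user.jpg", //reference image used for detection
"https://example.com/source.mp4"); //source video URL, used only to name the output file
System.out.println("Tagged video written to: " + taggedVideoPath);
}
}
The returned path is empty when the source file is missing or when any step fails, so callers should check for an empty string before using the result.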