新增模型

This commit is contained in:
2025-09-26 17:18:13 +08:00
parent 77f18452c1
commit 456c7f4a01
17 changed files with 19172 additions and 282 deletions

View File

@@ -67,14 +67,47 @@
<version>2.6</version>
</dependency>
<!--加载本地jar包-->
<!-- &lt;!&ndash;加载本地jar包&ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>sunjce_provider</groupId>-->
<!-- <artifactId>sunjce_provider</artifactId>-->
<!-- <version>0.0.1</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${project.basedir}/src/main/resources/libs/arcsoft-sdk-face-3.0.0.0.jar-->
<!-- </systemPath>-->
<!-- </dependency>-->
<!-- JavaCV 整合(包含 OpenCV/FFmpeg 的平台二进制,跨平台最省心) -->
<dependency>
<groupId>sunjce_provider</groupId>
<artifactId>sunjce_provider</artifactId>
<version>0.0.1</version>
<scope>system</scope>
<systemPath>${project.basedir}/src/main/resources/libs/arcsoft-sdk-face-3.0.0.0.jar
</systemPath>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.10</version>
</dependency>
<!-- 解析 models.json 用(如果你按我给的多模型配置走) -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.1</version>
</dependency>
<!-- JavaCV 封装 -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.10</version> <!-- 建议用 1.5.10 或更高版本 -->
</dependency>
<!-- OpenCV 平台包(含 DNN 模块) -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>opencv-platform</artifactId>
<version>4.9.0-1.5.10</version>
</dependency>
<!-- FFmpeg 平台包(用于视频流解码、推流) -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>ffmpeg-platform</artifactId>
<version>6.1.1-1.5.10</version>
</dependency>
</dependencies>

View File

@@ -0,0 +1,47 @@
package com.ruoyi.video.common;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.ruoyi.video.thread.detector.*;
import java.net.URL;
import java.nio.file.*;
import java.util.*;
/**
 * Loads the YOLO detectors described by a models.json resource and keeps
 * them keyed by model name. Closing the manager closes every detector.
 */
public final class ModelManager implements AutoCloseable {

    /** Detectors in declaration order (insertion-ordered map). */
    private final Map<String, YoloDetector> detectors = new LinkedHashMap<>();

    /**
     * Reads the model descriptors from the given JSON array and instantiates
     * one {@code OpenVinoYoloDetector} per entry.
     *
     * @param modelsJson URL of a JSON array: [{"name","path","size":[w,h],"backend"}, ...]
     * @throws Exception on JSON parse failure, missing resource, or detector init failure
     */
    public void load(URL modelsJson) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode models = (ArrayNode) mapper.readTree(modelsJson);
        // Simple palette: each model gets its own RGB colour, cycled if there
        // are more models than palette entries.
        int[] palette = {0x00FF00, 0xFF8000, 0x00A0FF, 0xFF00FF, 0x00FFFF, 0xFF0000, 0x80FF00};
        int paletteIndex = 0;
        for (JsonNode node : models) {
            String name = node.get("name").asText();
            String path = node.get("path").asText();
            int width = node.get("size").get(0).asInt();
            int height = node.get("size").get(1).asInt();
            String backend = node.get("backend").asText();
            URL dirUrl = getClass().getClassLoader().getResource(path);
            Objects.requireNonNull(dirUrl, "Resource not found: " + path);
            // NOTE(review): Paths.get(URI) only works for file: URLs — model
            // directories must live on the filesystem, not inside a jar.
            // Confirm the packaging layout supports this.
            Path modelDir = Paths.get(dirUrl.toURI());
            int rgb = palette[paletteIndex % palette.length];
            paletteIndex++;
            // Swap red and blue channels: 0xRRGGBB -> 0xBBGGRR for OpenCV (BGR).
            int bgr = ((rgb & 0xFF) << 16) | (rgb & 0xFF00) | ((rgb >> 16) & 0xFF);
            detectors.put(name, new OpenVinoYoloDetector(name, modelDir, width, height, backend, bgr));
        }
    }

    /** @return the detector registered under {@code name}, or {@code null} if absent */
    public YoloDetector get(String name) {
        return detectors.get(name);
    }

    /** @return a fresh list of all loaded detectors, in load order */
    public List<YoloDetector> all() {
        return new ArrayList<>(detectors.values());
    }

    /** Closes every detector (best-effort, exceptions ignored) and clears the registry. */
    @Override
    public void close() {
        for (YoloDetector d : detectors.values()) {
            try {
                d.close();
            } catch (Exception ignored) {
                // keep closing the remaining detectors
            }
        }
        detectors.clear();
    }
}

View File

@@ -0,0 +1,6 @@
package com.ruoyi.video.domain;
import org.bytedeco.opencv.opencv_core.Rect;
/**
 * One object-detection result produced by a {@code YoloDetector}.
 *
 * @param cls      class label; detectors prefix it with the producing model's name
 * @param conf     confidence score (higher means more confident)
 * @param box      bounding box in frame pixel coordinates
 * @param colorBGR packed box colour used for drawing — packed as 0xBBGGRR by
 *                 ModelManager; NOTE(review): Overlay reads the low byte as
 *                 blue, so confirm which packing the renderer really expects
 */
public record Detection(String cls, float conf, Rect box, int colorBGR) {
}

View File

@@ -99,8 +99,6 @@ public class FlvHandler extends SimpleChannelInboundHandler<Object> {
}
if (!req.decoderResult().isSuccess() || (!"websocket".equals(req.headers().get("Upgrade")))) {
// http请求
System.out.println("q3gegyaisgd");
sendFlvReqHeader(ctx);
mediaService.playForHttp(cameraDto, ctx);

View File

@@ -1,35 +1,38 @@
package com.ruoyi.video.thread;
import com.arcsoft.face.toolkit.ImageInfo;
import com.ruoyi.common.utils.spring.SpringUtils;
import com.ruoyi.video.common.ClientType;
import com.ruoyi.video.common.ModelManager; // ★ 新增:多模型管理(见前面提供的类)
import com.ruoyi.video.domain.Detection; // ★ 新增:检测结果(见前面提供的类)
import com.ruoyi.video.domain.dto.CameraDto;
import com.ruoyi.video.service.MediaService;
import com.ruoyi.video.thread.detector.CompositeDetector; // ★ 新增:并行多模型
import com.ruoyi.video.thread.detector.YoloDetector; // ★ 新增:检测接口
import com.ruoyi.video.utils.Overlay; // ★ 新增:画框工具
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.FFmpegLogCallback;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.Mat;
import org.springframework.scheduling.annotation.Async;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
/**
* @Author: orange
* @CreateTime: 2025-01-16
@@ -42,125 +45,98 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
FFmpegLogCallback.set();
}
/*** ====== 原有字段 ====== ***/
private final ConcurrentHashMap<String, ChannelHandlerContext> wsClients = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, ChannelHandlerContext> httpClients = new ConcurrentHashMap<>();
/**
* ws客户端
*/
private ConcurrentHashMap<String, ChannelHandlerContext> wsClients = new ConcurrentHashMap<>();
/**
* http客户端
*/
private ConcurrentHashMap<String, ChannelHandlerContext> httpClients = new ConcurrentHashMap<>();
/**
* 运行状态
*/
private volatile boolean running = false;
private boolean grabberStatus = false;
private boolean recorderStatus = false;
/**
* 当前在线人数
*/
private int hcSize, wcSize = 0;
/**
* 用于没有客户端时候的计时
*/
private int noClient = 0;
/**
* flv header
*/
private byte[] header = null;
// 输出流,视频最终会输出到此
private ByteArrayOutputStream bos = new ByteArrayOutputStream();
private final ByteArrayOutputStream bos = new ByteArrayOutputStream();
FFmpegFrameGrabber grabber;// 拉流器
FFmpegFrameRecorder recorder;// 推流录制器
private FFmpegFrameGrabber grabber; // 拉流器
private FFmpegFrameRecorder recorder; // 推流录制器
/**
* true:转复用,false:转码
*/
boolean transferFlag = false;// 默认转码
/** true:转复用,false:转码 */
private boolean transferFlag = false; // 默认转码
/**
* 相机
*/
private CameraDto cameraDto;
/**
* 监听线程,用于监听状态
*/
private final CameraDto cameraDto;
private Thread listenThread;
/*** ====== 新增:推理相关字段 ====== ***/
// 开关:是否启用检测(可对外提供 setter
private boolean enableDetection = true;
// 模型与推理
private ModelManager modelManager;
private YoloDetector detector;
// 三线程解耦所需
private final OpenCVFrameConverter.ToMat toMat = new OpenCVFrameConverter.ToMat();
private final AtomicReference<Mat> latestFrame = new AtomicReference<>();
private final AtomicReference<List<Detection>> latestDetections =
new AtomicReference<>(java.util.Collections.emptyList());
public MediaTransferFlvByJavacv(CameraDto cameraDto) {
super();
this.cameraDto = cameraDto;
}
public boolean isRunning() {
return running;
}
public void setRunning(boolean running) {
this.running = running;
}
public boolean isGrabberStatus() {
return grabberStatus;
}
public void setGrabberStatus(boolean grabberStatus) {
this.grabberStatus = grabberStatus;
}
public boolean isRecorderStatus() {
return recorderStatus;
}
public void setRecorderStatus(boolean recorderStatus) {
this.recorderStatus = recorderStatus;
public boolean isRunning() { return running; }
public void setRunning(boolean running) { this.running = running; }
public boolean isGrabberStatus() { return grabberStatus; }
public void setGrabberStatus(boolean grabberStatus) { this.grabberStatus = grabberStatus; }
public boolean isRecorderStatus() { return recorderStatus; }
public void setRecorderStatus(boolean recorderStatus) { this.recorderStatus = recorderStatus; }
public void setEnableDetection(boolean enable) { this.enableDetection = enable; }
/*** ====== 推理初始化 ====== ***/
private void initDetectors() throws Exception {
if (!enableDetection) return;
modelManager = new ModelManager();
URL json = getClass().getResource("/models/models.json");
modelManager.load(json);
// 单模型: detector = modelManager.get("person-helmet");
// 多模型并行示例并行度按CPU核数/模型大小调整:
detector = new CompositeDetector(
"all-models",
java.util.List.of(modelManager.get("person-helmet"), modelManager.get("vehicle-plate")),
2
);
log.info("YOLO detectors ready: {}", detector.name());
}
/*** ====== 拉流器 ====== ***/
protected boolean createGrabber() {
grabber = new FFmpegFrameGrabber(cameraDto.getUrl());
// 这些参数很多是“微秒”单位;如果你的 getNetTimeout() / getReadOrWriteTimeout() 是毫秒,
// 记得 *1000 转为微秒字符串。这里演示保守固定值,先跑通为先。
String fiveSecUs = "5000000"; // 5s in microseconds
String oneMb = "1048576"; // 1MB
// 注意:这些是微秒字符串
String fiveSecUs = "5000000";
String oneMb = "1048576";
// ---- 通用优化 ----
grabber.setOption("threads", "1");
grabber.setOption("buffer_size", oneMb); // 输入缓冲
grabber.setOption("rw_timeout", fiveSecUs); // 读写超时(微秒)
grabber.setOption("stimeout", fiveSecUs); // 套接字超时(微秒)
grabber.setOption("probesize", fiveSecUs); // 探测时长(微秒)
grabber.setOption("analyzeduration", fiveSecUs);// 解析时长(微秒)
grabber.setOption("fflags", "nobuffer"); // 低延迟
grabber.setOption("buffer_size", oneMb);
grabber.setOption("rw_timeout", fiveSecUs);
grabber.setOption("stimeout", fiveSecUs);
grabber.setOption("probesize", "1048576"); // ← 修正probesize 是“字节”
grabber.setOption("analyzeduration", fiveSecUs);
grabber.setOption("fflags", "nobuffer");
grabber.setOption("flags", "low_delay");
grabber.setOption("loglevel", "debug"); // 先开调试,稳定后可关
grabber.setOption("loglevel", "error"); // 稳定后压低日志
// ---- RTSP 关键修正 ----
if (cameraDto.getUrl().toLowerCase().startsWith("rtsp://")) {
// 统一走 TCPRTP over RTSP/interleaved避免 461 / NAT / 防火墙问题
grabber.setOption("rtsp_transport", "tcp");
// 不再设置 prefer_tcp没意义了确保不带 multicast 相关 flag
// 只要视频轨,避免音频轨导致的协商失败(可按需移除)
grabber.setOption("rtsp_transport", "tcp"); // 你要测 UDP 再改
grabber.setOption("allowed_media_types", "video");
// 避免较大的 RTP 乱序延迟
grabber.setOption("max_delay", "500000"); // 0.5s (微秒)
// 某些设备对 UA 比较挑,设置一个常见 UA可选
grabber.setOption("max_delay", "500000");
grabber.setOption("user_agent", "Lavf/60");
}
else if (cameraDto.getUrl().toLowerCase().startsWith("rtmp://")) {
} else if (cameraDto.getUrl().toLowerCase().startsWith("rtmp://")) {
grabber.setOption("rtmp_buffer", "1000");
}
else if ("desktop".equalsIgnoreCase(cameraDto.getUrl())) {
} else if ("desktop".equalsIgnoreCase(cameraDto.getUrl())) {
grabber.setFormat("gdigrab");
grabber.setOption("draw_mouse", "1");
grabber.setNumBuffers(0);
@@ -175,82 +151,62 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
return (grabberStatus = true);
} catch (FrameGrabber.Exception e) {
MediaService.cameras.remove(cameraDto.getMediaKey());
log.error("\n{}\n启动拉流器失败网络超时或视频源不可用{}", cameraDto.getUrl(), e.getMessage());
log.error("\n{}\n启动拉流器失败网络超时或视频源不可用{}",
cameraDto.getUrl(), e.getMessage());
return (grabberStatus = false);
}
}
/**
* 创建转码推流录制器
*
* @return
*/
/*** ====== 录制器(转码/转复用) ====== ***/
protected boolean createTransterOrRecodeRecorder() {
// 若启用检测,必须转码(因为需要在像素上画框)
if (enableDetection) transferFlag = false;
recorder = new FFmpegFrameRecorder(bos, grabber.getImageWidth(), grabber.getImageHeight(),
grabber.getAudioChannels());
recorder.setFormat("flv");
if (!transferFlag) {
// 转码
// 转码(低延迟 H.264
recorder.setInterleaved(false);
recorder.setVideoOption("tune", "zerolatency");
recorder.setVideoOption("preset", "ultrafast");
recorder.setVideoOption("crf", "26");
recorder.setVideoOption("threads", "1");
recorder.setFrameRate(25);// 设置帧率
recorder.setGopSize(25);// 设置gop,与帧率相同相当于间隔1秒chan's一个关键帧
// recorder.setVideoBitrate(500 * 1000);// 码率500kb/s
// recorder.setVideoCodecName("libx264"); //javacv 1.5.5无法使用libx264名称请使用下面方法
recorder.setFrameRate(25);
recorder.setGopSize(25);
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
// recorder.setAudioCodecName("aac");
/**
* 启用RDOQ算法优化视频质量 1在视频码率和视频质量之间取得平衡 2最大程度优化视频质量会降低编码速度和提高码率
*/
recorder.setTrellis(1);
recorder.setMaxDelay(0);// 设置延迟
recorder.setMaxDelay(0);
try {
recorder.start();
return recorderStatus = true;
} catch (org.bytedeco.javacv.FrameRecorder.Exception e1) {
} catch (FrameRecorder.Exception e1) {
log.info("启动转码录制器失败", e1);
MediaService.cameras.remove(cameraDto.getMediaKey());
e1.printStackTrace();
}
} else {
// 转复用
// 不让recorder关联关闭outputStream
// 转复用(不画框时可用)
recorder.setCloseOutputStream(false);
try {
recorder.start(grabber.getFormatContext());
return recorderStatus = true;
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
log.warn("\r\n{}\r\n启动转复用录制器失败", cameraDto.getUrl());
// 如果转复用失败,则自动切换到转码模式
} catch (FrameRecorder.Exception e) {
log.warn("\r\n{}\r\n启动转复用录制器失败,自动切换转码", cameraDto.getUrl());
transferFlag = false;
if (recorder != null) {
try {
recorder.stop();
} catch (org.bytedeco.javacv.FrameRecorder.Exception e1) {
}
}
try { recorder.stop(); } catch (FrameRecorder.Exception ignored) {}
if (createTransterOrRecodeRecorder()) {
log.error("\r\n{}\r\n切换到转码模式", cameraDto.getUrl());
return true;
}
log.error("\r\n{}\r\n切换转码模式失败", cameraDto.getUrl());
e.printStackTrace();
log.error("\r\n{}\r\n切换转码模式失败", cameraDto.getUrl(), e);
}
}
return recorderStatus = false;
}
/**
* 是否支持flv的音视频编码
*
* @return
*/
private boolean supportFlvFormatCodec() {
int vcodec = grabber.getVideoCodec();
int acodec = grabber.getAudioCodec();
@@ -260,44 +216,99 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
&& (avcodec.AV_CODEC_ID_AAC == acodec || avcodec.AV_CODEC_ID_AAC_LATM == acodec);
}
/**
* 将视频源转换为flv
*/
/*** ====== 主流程:转换为 FLV 并输出 ====== ***/
protected void transferStream2Flv() {
if (!createGrabber()) {
return;
}
transferFlag = supportFlvFormatCodec();
if (!createTransterOrRecodeRecorder()) {
return;
try {
if (enableDetection) initDetectors();
} catch (Exception e) {
log.error("初始化检测模型失败:{}", e.getMessage(), e);
// 模型失败也不中断推流,只是不画框
enableDetection = false;
}
try {
grabber.flush();
} catch (FrameGrabber.Exception e) {
log.info("清空拉流器缓存失败", e);
e.printStackTrace();
}
if (!createGrabber()) return;
// 如果未启用检测,且编解码本身支持 FLV可以转复用提升性能
if (!enableDetection) transferFlag = supportFlvFormatCodec();
if (!createTransterOrRecodeRecorder()) return;
try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.info("清空拉流器缓存失败", e); }
if (header == null) {
header = bos.toByteArray();
// System.out.println(HexUtil.encodeHexStr(header));
bos.reset();
}
running = true;
// 启动监听线程(用于判断是否需要自动关闭推流)
listenClient();
// 时间戳计算
long startTime = 0;
long videoTS = 0;
for (; running && grabberStatus && recorderStatus; ) {
// === 若启用检测,启动“解码→推理→渲染”解耦线程 ===
Thread tDecode = null, tInfer = null;
if (enableDetection) {
// 解码线程:仅更新 latestFrame覆盖式不阻塞
tDecode = new Thread(() -> {
while (running && grabberStatus) {
try {
Frame f = grabber.grabImage();
if (f == null) continue;
Mat m = toMat.convert(f);
if (m == null || m.empty()) continue;
Mat copy = new Mat(m.rows(), m.cols(), CV_8UC3);
m.copyTo(copy);
Mat old = latestFrame.getAndSet(copy);
if (old != null) old.release();
} catch (Exception e) {
log.debug("decode err: {}", e.getMessage());
}
}
}, "det-decode");
// 推理线程:限速(默认 15 FPS更新 latestDetections
int inferFps = 15;
long period = 1_000_000_000L / inferFps;
tInfer = new Thread(() -> {
long next = System.nanoTime();
while (running && grabberStatus) {
long now = System.nanoTime();
if (now < next) { LockSupport.parkNanos(next - now); continue; }
next += period;
Mat src = latestFrame.get();
if (src == null || src.empty()) continue;
Mat snap = new Mat(); src.copyTo(snap);
try {
List<Detection> dets = detector.detect(snap);
latestDetections.set(dets);
} catch (Throwable e) {
log.debug("infer err: {}", e.getMessage());
} finally {
snap.release();
}
}
}, "det-infer");
// 抢先预热一次,避免前几帧无框
try {
Frame warm = grabber.grabImage();
if (warm != null) {
Mat wm = toMat.convert(warm);
if (wm != null && !wm.empty() && detector != null) {
latestDetections.set(detector.detect(wm));
}
}
} catch (Exception ignored) { }
tDecode.start();
tInfer.start();
}
// === 主发送循环(转复用/转码两种路径) ===
for (; running && grabberStatus && recorderStatus; ) {
try {
if (transferFlag) {
// 转复用
// ---- 转复用(不画框)----
long startGrab = System.currentTimeMillis();
AVPacket pkt = grabber.grabPacket();
if ((System.currentTimeMillis() - startGrab) > 5000) {
@@ -305,49 +316,45 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
closeMedia();
break;
}
if (null != pkt && !pkt.isNull()) {
if (startTime == 0) {
startTime = System.currentTimeMillis();
}
if (pkt != null && !pkt.isNull()) {
if (startTime == 0) startTime = System.currentTimeMillis();
videoTS = 1000 * (System.currentTimeMillis() - startTime);
// 判断时间偏移
if (videoTS > recorder.getTimestamp()) {
recorder.setTimestamp((videoTS));
}
if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
recorder.recordPacket(pkt);
}
} else {
// 转码
// ---- 转码(可画框)----
long startGrab = System.currentTimeMillis();
Frame frame = grabber.grab();
Frame frame;
if (enableDetection) {
// 如果启用检测,解码线程已在跑;这里直接从 latestFrame 取,减少重复解码
Mat src = latestFrame.get();
if (src == null || src.empty()) continue;
// 叠加最近一次检测结果
Overlay.draw(latestDetections.get(), src);
frame = toMat.convert(src);
} else {
// 未开启检测:直接 grab 并转码
frame = grabber.grab();
}
if ((System.currentTimeMillis() - startGrab) > 5000) {
log.info("\r\n{}\r\n视频流网络异常>>>", cameraDto.getUrl());
closeMedia();
break;
}
if (frame != null) {
if (startTime == 0) {
startTime = System.currentTimeMillis();
}
if (startTime == 0) startTime = System.currentTimeMillis();
videoTS = 1000 * (System.currentTimeMillis() - startTime);
// 判断时间偏移
if (videoTS > recorder.getTimestamp()) {
// System.out.println("矫正时间戳: " + videoTS + " : " + recorder.getTimestamp() + "
// -> "
// + (videoTS - recorder.getTimestamp()));
recorder.setTimestamp((videoTS));
}
if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
recorder.record(frame);
}
}
} catch (FrameGrabber.Exception e) {
grabberStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
} catch (FrameRecorder.Exception e) {
recorderStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
}
@@ -355,34 +362,27 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
if (bos.size() > 0) {
byte[] b = bos.toByteArray();
bos.reset();
// 发送视频到前端
sendFrameData(b);
}
}
// 启动失败,直接关闭, close包含stop和release方法。录制文件必须保证最后执行stop()方法
// === 收尾 ===
try {
recorder.close();
grabber.close();
if (detector != null) try { detector.close(); } catch (Exception ignored) {}
if (modelManager != null) try { modelManager.close(); } catch (Exception ignored) {}
if (recorder != null) recorder.close();
if (grabber != null) grabber.close();
bos.close();
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
e.printStackTrace();
} catch (FrameGrabber.Exception e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (Exception ignored) {
} finally {
Mat m = latestFrame.getAndSet(null);
if (m != null) m.release();
closeMedia();
}
log.info("关闭媒体流-javacv{} ", cameraDto.getUrl());
}
/**
* 发送帧数据
*
* @param data
*/
/*** ====== 网络发送(原样保留) ====== ***/
private void sendFrameData(byte[] data) {
// ws
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
@@ -393,7 +393,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
wsClients.remove(entry.getKey());
hasClient();
}
} catch (java.lang.Exception e) {
} catch (Exception e) {
wsClients.remove(entry.getKey());
hasClient();
e.printStackTrace();
@@ -408,7 +408,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
httpClients.remove(entry.getKey());
hasClient();
}
} catch (java.lang.Exception e) {
} catch (Exception e) {
httpClients.remove(entry.getKey());
hasClient();
e.printStackTrace();
@@ -416,13 +416,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
}
}
/**
* 判断有没有客户端,关闭流
*
* @return
*/
public void hasClient() {
int newHcSize = httpClients.size();
int newWcSize = wsClients.size();
if (hcSize != newHcSize || wcSize != newWcSize) {
@@ -430,77 +424,42 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
wcSize = newWcSize;
log.info("\r\n{}\r\nhttp连接数{}, ws连接数{} \r\n", cameraDto.getUrl(), newHcSize, newWcSize);
}
// 无需自动关闭
if (!cameraDto.isAutoClose()) {
return;
}
if (!cameraDto.isAutoClose()) return;
if (httpClients.isEmpty() && wsClients.isEmpty()) {
// 等待20秒还没有客户端则关闭推流
if (noClient > cameraDto.getNoClientsDuration()) {
closeMedia();
} else {
noClient += 1000;
// log.info("\r\n{}\r\n {} 秒自动关闭推拉流 \r\n", camera.getUrl(), noClientsDuration-noClient);
}
} else {
// 重置计时
noClient = 0;
}
}
/**
* 监听客户端,用于判断无人观看时自动关闭推流
*/
public void listenClient() {
listenThread = new Thread(new Runnable() {
public void run() {
while (running) {
hasClient();
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
}
}
listenThread = new Thread(() -> {
while (running) {
hasClient();
try { Thread.sleep(1000); } catch (InterruptedException ignored) {}
}
});
listenThread.start();
}
/**
* 关闭流媒体
*/
private void closeMedia() {
running = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
// 媒体异常时,主动断开前端长连接
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
try {
entry.getValue().close();
} catch (java.lang.Exception e) {
} finally {
wsClients.remove(entry.getKey());
}
try { entry.getValue().close(); } catch (Exception ignored) {}
finally { wsClients.remove(entry.getKey()); }
}
for (Map.Entry<String, ChannelHandlerContext> entry : httpClients.entrySet()) {
try {
entry.getValue().close();
} catch (java.lang.Exception e) {
} finally {
httpClients.remove(entry.getKey());
}
try { entry.getValue().close(); } catch (Exception ignored) {}
finally { httpClients.remove(entry.getKey()); }
}
}
/**
* 新增客户端
*
* @param ctx netty client
* @param ctype enum,ClientType
*/
public void addClient(ChannelHandlerContext ctx, ClientType ctype) {
int timeout = 0;
while (true) {
@@ -508,23 +467,22 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
if (header != null) {
try {
if (ctx.channel().isWritable()) {
// 发送帧前先发送header
if (ClientType.HTTP.getType() == ctype.getType()) {
ChannelFuture future = ctx.writeAndFlush(Unpooled.copiedBuffer(header));
future.addListener(new GenericFutureListener<Future<? super Void>>() {
@Override
public void operationComplete(Future<? super Void> future) throws FrameGrabber.Exception {
public void operationComplete(Future<? super Void> future) {
if (future.isSuccess()) {
httpClients.put(ctx.channel().id().toString(), ctx);
}
}
});
} else if (ClientType.WEBSOCKET.getType() == ctype.getType()) {
ChannelFuture future = ctx
.writeAndFlush(new BinaryWebSocketFrame(Unpooled.copiedBuffer(header)));
ChannelFuture future = ctx.writeAndFlush(
new BinaryWebSocketFrame(Unpooled.copiedBuffer(header)));
future.addListener(new GenericFutureListener<Future<? super Void>>() {
@Override
public void operationComplete(Future<? super Void> future) throws FrameGrabber.Exception {
public void operationComplete(Future<? super Void> future) {
if (future.isSuccess()) {
wsClients.put(ctx.channel().id().toString(), ctx);
}
@@ -532,30 +490,19 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
});
}
}
} catch (java.lang.Exception e) {
} catch (Exception e) {
e.printStackTrace();
}
break;
}
// 等待推拉流启动
Thread.sleep(50);
// 启动录制器失败
timeout += 50;
if (timeout > 30000) {
break;
}
} catch (java.lang.Exception e) {
if (timeout > 30000) break;
} catch (Exception e) {
e.printStackTrace();
}
}
}
@Override
public void run() {
transferStream2Flv();
}
@Override public void run() { transferStream2Flv(); }
}

View File

@@ -0,0 +1,41 @@
package com.ruoyi.video.thread.detector;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.opencv.opencv_core.Mat;
import java.util.*;
import java.util.concurrent.*;
/**
 * Runs several {@link YoloDetector}s over the same frame in parallel and
 * merges their results into one list.
 */
public final class CompositeDetector implements YoloDetector {

    /** Logical name of the composite (reported by {@link #name()}). */
    private final String name;
    /** Delegates executed for every frame. */
    private final List<YoloDetector> list;
    /** Worker pool sized by the requested parallelism (at least 1 thread). */
    private final ExecutorService pool;

    /**
     * @param name        logical name of the composite
     * @param list        detectors to run on each frame
     * @param parallelism worker-thread count, clamped to a minimum of 1
     */
    public CompositeDetector(String name, List<YoloDetector> list, int parallelism) {
        this.name = name;
        this.list = list;
        this.pool = Executors.newFixedThreadPool(Math.max(1, parallelism));
    }

    @Override public String name(){ return name; }

    /**
     * Runs every delegate on {@code bgr} concurrently and concatenates the
     * results. A delegate that throws contributes nothing, but the other
     * models' detections are still returned (previously one failure discarded
     * everything). Cross-model NMS could be applied on the merged list here.
     *
     * @param bgr frame to analyse (shared read-only across the delegates)
     * @return merged detections, possibly empty
     */
    @Override
    public List<Detection> detect(Mat bgr) {
        if (list.isEmpty()) return Collections.emptyList();
        List<Callable<List<Detection>>> tasks = new ArrayList<>(list.size());
        for (YoloDetector d : list) tasks.add(() -> d.detect(bgr));
        List<Detection> merged = new ArrayList<>();
        try {
            for (Future<List<Detection>> f : pool.invokeAll(tasks)) {
                try {
                    merged.addAll(f.get());
                } catch (ExecutionException e) {
                    // One model failing must not drop the others' detections.
                }
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe cancellation.
            Thread.currentThread().interrupt();
            return Collections.emptyList();
        } catch (RuntimeException e) {
            return Collections.emptyList();
        }
        return merged;
    }

    /** Stops the worker pool and closes every delegate (best-effort). */
    @Override public void close() {
        pool.shutdownNow();
        list.forEach(d -> { try { d.close(); } catch (Exception ignored) {} });
    }
}

View File

@@ -0,0 +1,108 @@
package com.ruoyi.video.thread.detector;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.javacpp.indexer.FloatRawIndexer;
import org.bytedeco.javacpp.indexer.IntRawIndexer;
import org.bytedeco.opencv.opencv_core.*;
import org.bytedeco.opencv.opencv_dnn.Net;
import java.nio.file.*;
import java.util.*;
import static org.bytedeco.opencv.global.Dnn.*;
import static org.bytedeco.opencv.global.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
 * YOLO detector backed by OpenCV DNN, loading an OpenVINO IR model
 * (model.xml / model.bin) plus an optional classes.txt label file.
 *
 * NOTE(review): this class appears to mix two OpenCV Java APIs —
 * MatOfRect2d/MatOfFloat/MatOfInt belong to the official org.opencv.core
 * bindings, while the imports here are Bytedeco (org.bytedeco.opencv.*);
 * FloatRawIndexer/IntRawIndexer live in org.bytedeco.javacpp.indexer and are
 * not covered by the file's imports. Verify this compiles against the
 * project's javacv-platform dependency.
 */
public final class OpenVinoYoloDetector implements YoloDetector {
// Model name used to prefix every detection label.
private final String modelName;
// DNN network — a native resource, released in close().
private final Net net;
// Network input size (width x height), e.g. 640x640.
private final Size input;
// Confidence threshold and NMS IoU threshold.
private final float confTh = 0.25f, nmsTh = 0.45f;
// Labels from classes.txt; empty array when the file is absent.
private final String[] classes;
// Box colour assigned to this model (passed through to each Detection).
private final int colorBGR;
/**
 * Loads the IR model from {@code dir} and configures the inference backend.
 *
 * @param name     model name (label prefix)
 * @param dir      directory containing model.xml, model.bin and optionally classes.txt
 * @param inW      network input width
 * @param inH      network input height
 * @param backend  "openvino" selects the Inference Engine backend, anything else uses plain OpenCV
 * @param colorBGR colour to tag detections with
 * @throws Exception if the model files cannot be read or the network cannot be created
 */
public OpenVinoYoloDetector(String name, Path dir, int inW, int inH, String backend, int colorBGR) throws Exception {
this.modelName = name;
this.input = new Size(inW, inH);
this.colorBGR = colorBGR;
String xml = dir.resolve("model.xml").toString();
String bin = dir.resolve("model.bin").toString();
Path clsPath = dir.resolve("classes.txt");
if (Files.exists(clsPath)) {
// One label per line; blank lines are skipped.
this.classes = Files.readAllLines(clsPath).stream().map(String::trim)
.filter(s -> !s.isEmpty()).toArray(String[]::new);
} else {
this.classes = new String[0];
}
this.net = readNetFromModelOptimizer(xml, bin);
if ("openvino".equalsIgnoreCase(backend)) {
// OpenVINO Inference Engine backend on CPU.
net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
net.setPreferableTarget(DNN_TARGET_CPU);
} else {
// Fallback: OpenCV's own DNN implementation on CPU.
net.setPreferableBackend(DNN_BACKEND_OPENCV);
net.setPreferableTarget(DNN_TARGET_CPU);
}
}
@Override public String name() { return modelName; }
/**
 * Runs one inference pass and returns NMS-filtered detections in frame
 * pixel coordinates.
 *
 * Assumes a YOLO-style output row [cx, cy, w, h, obj, class-scores...]
 * with coordinates normalized to [0,1] (they are multiplied by the frame
 * size below) — TODO confirm against the exported model.
 *
 * NOTE(review): out, m, idx, b, s, keep and kidx are native-backed objects
 * that are never released/closed here; only the input blob is in
 * try-with-resources. Consider releasing them to avoid native memory growth.
 *
 * @param bgr input frame in BGR channel order (swapRB=true converts to RGB for the net)
 * @return detections that survive the confidence threshold and NMS
 */
@Override
public List<Detection> detect(Mat bgr) {
try (Mat blob = blobFromImage(bgr, 1.0/255.0, input, new Scalar(0.0), true, false, CV_32F)) {
net.setInput(blob);
Mat out = new Mat();
net.forward(out); // 常见: [1,N,C] or [N,C]
// Flatten to N rows of C columns; total/size(2) assumes a 3-D [1,N,C]
// output — NOTE(review): this throws/misbehaves for a 2-D [N,C] output.
Mat m = out.reshape(1, (int)out.total() / out.size(2));
int fw = bgr.cols(), fh = bgr.rows();
FloatRawIndexer idx = m.createIndexer();
int N = m.rows(), C = m.cols();
List<Rect2d> boxes = new ArrayList<>();
List<Float> scores = new ArrayList<>();
List<Integer> classIds = new ArrayList<>();
for (int i = 0; i < N; i++) {
// Row layout: centre x/y, width, height, objectness, then class scores.
float cx = idx.get(i,0), cy=idx.get(i,1), w=idx.get(i,2), h=idx.get(i,3);
float obj = idx.get(i,4);
int best=-1; float pmax=0f;
for (int c=5;c<C;c++) { float p=idx.get(i,c); if (p>pmax){pmax=p; best=c-5;} }
// Final confidence = objectness * best class probability.
float conf = obj * pmax;
if (conf < confTh) continue;
// Convert centre/size (normalized) to a clamped pixel-space rect.
int bx = Math.max(0, Math.round(cx*fw - (w*fw)/2f));
int by = Math.max(0, Math.round(cy*fh - (h*fh)/2f));
int bw = Math.min(fw-bx, Math.round(w*fw));
int bh = Math.min(fh-by, Math.round(h*fh));
if (bw<=0 || bh<=0) continue;
boxes.add(new Rect2d(bx,by,bw,bh));
scores.add(conf);
classIds.add(best);
}
// NMS
MatOfRect2d b = new MatOfRect2d(boxes.toArray(new Rect2d[0]));
MatOfFloat s = new MatOfFloat(toArray(scores));
MatOfInt keep = new MatOfInt();
NMSBoxes(b, s, confTh, nmsTh, keep);
List<Detection> outList = new ArrayList<>();
IntRawIndexer kidx = keep.createIndexer();
for (int i=0;i<keep.rows();i++){
int k = kidx.get(i);
Rect2d r = boxes.get(k);
Rect rect = new Rect((int)r.x(), (int)r.y(), (int)r.width(), (int)r.height());
// Unknown class index (no classes.txt entry) falls back to "cls<N>".
String cname = (classIds.get(k)>=0 && classIds.get(k)<classes.length)
? classes[classIds.get(k)] : "cls"+classIds.get(k);
outList.add(new Detection("["+modelName+"] "+cname, scores.get(k), rect, colorBGR));
}
return outList;
}
}
// Unboxes a List<Float> into a primitive float[] for MatOfFloat.
private static float[] toArray(List<Float> ls){ float[] a=new float[ls.size()]; for(int i=0;i<ls.size();i++) a[i]=ls.get(i); return a; }
/** Releases the native DNN network. */
@Override public void close(){ net.close(); }
}

View File

@@ -0,0 +1,12 @@
package com.ruoyi.video.thread.detector;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.opencv.opencv_core.Mat;
import java.util.List;
/**
 * A named object detector that produces {@link Detection}s from a single
 * video frame. Implementations may own native resources and should be
 * closed when no longer needed.
 */
public interface YoloDetector extends AutoCloseable {
/** @return human-readable detector name, used for logging and labels */
String name();
/**
 * Runs detection on one frame.
 *
 * @param bgr input image in BGR channel order
 * @return detections in frame pixel coordinates (possibly empty, never null)
 */
List<Detection> detect(Mat bgr);
/** Releases resources; the default is a no-op for stateless detectors. */
@Override default void close() {}
}

View File

@@ -0,0 +1,27 @@
package com.ruoyi.video.utils;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.opencv.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
import java.util.List;
/**
 * Utility for drawing detection boxes and labels onto a video frame.
 */
public final class Overlay {
    private Overlay(){
        // static utility class — no instances
    }

    /**
     * Draws each detection as a coloured rectangle plus a filled label
     * background and black label text.
     *
     * @param dets  detections to render (frame pixel coordinates)
     * @param frame BGR image to draw on, modified in place
     */
    public static void draw(List<Detection> dets, Mat frame) {
        for (Detection d : dets) {
            Rect r = d.box();
            int bgr = d.colorBGR();
            // colorBGR is packed 0xBBGGRR (see ModelManager's RGB->BGR swap) and
            // OpenCV Scalar order is (B, G, R, A). The previous code read the low
            // byte as blue, which swapped red and blue on screen.
            Scalar c = new Scalar((bgr >> 16) & 0xFF, (bgr >> 8) & 0xFF, bgr & 0xFF, 0);
            rectangle(frame, r, c, 2, LINE_8, 0);
            String label = d.cls()+" "+String.format("%.2f", d.conf());
            int[] baseline = new int[1];
            Size t = getTextSize(label, FONT_HERSHEY_SIMPLEX, 0.5, 1, baseline);
            int x = Math.max(0, r.x());
            // Place the label just above the box, but never above the frame top.
            int y = Math.max(t.height(), r.y()-4);
            // Clamp the background top so the filled rect never starts at a negative row.
            int bgTop = Math.max(0, y - t.height() - 4);
            rectangle(frame, new Rect(x, bgTop, t.width()+6, t.height()+6), c, FILLED, 0, 0);
            putText(frame, label, new Point(x+3, y), FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0,0,0,0), 1, LINE_AA, false);
        }
    }
}

View File

@@ -0,0 +1 @@
trash

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,4 @@
[
{"name":"smoke","path":"models/smoke","size":[640,640],"backend":"openvino"},
{"name":"garbage","path":"models/garbage","size":[640,640],"backend":"openvino"}
]

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
smoke

View File

@@ -0,0 +1,14 @@
description: Ultralytics best model trained on /data/datasets/smoke_group/data.yaml
author: Ultralytics
date: '2024-12-10T17:17:37.306155'
version: 8.2.86
license: AGPL-3.0 License (https://ultralytics.com/license)
docs: https://docs.ultralytics.com
stride: 32
task: detect
batch: 1
imgsz:
- 640
- 640
names:
0: smoke