diff --git a/bin/start-rtsp-server.bat b/bin/start-rtsp-server.bat
new file mode 100644
index 0000000..977208e
--- /dev/null
+++ b/bin/start-rtsp-server.bat
@@ -0,0 +1,20 @@
+@echo off
+echo 启动RTSP测试服务器...
+echo.
+echo 请确保已安装FFmpeg
+echo.
+
+REM 使用FFmpeg创建一个简单的RTSP服务器
+REM 这里使用测试视频文件,你可以替换为实际的视频文件路径
+set VIDEO_FILE=test.mp4
+
+REM 如果没有测试视频,创建一个测试模式
+if not exist "%VIDEO_FILE%" (
+ echo 创建测试视频流...
+    ffmpeg -f lavfi -i testsrc=duration=3600:size=640x480:rate=25 -c:v libx264 -preset ultrafast -f rtsp rtsp://localhost:8554/11
+) else (
+ echo 使用视频文件: %VIDEO_FILE%
+ ffmpeg -re -i "%VIDEO_FILE%" -c copy -f rtsp rtsp://localhost:8554/11
+)
+
+pause
\ No newline at end of file
diff --git a/bin/start-rtsp-server.sh b/bin/start-rtsp-server.sh
new file mode 100644
index 0000000..34d3496
--- /dev/null
+++ b/bin/start-rtsp-server.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+echo "启动RTSP测试服务器..."
+echo
+
+# 检查FFmpeg是否安装
+if ! command -v ffmpeg &> /dev/null; then
+ echo "错误: FFmpeg未安装,请先安装FFmpeg"
+ echo "Ubuntu/Debian: sudo apt install ffmpeg"
+ echo "CentOS/RHEL: sudo yum install ffmpeg"
+ exit 1
+fi
+
+# 视频文件路径
+VIDEO_FILE="test.mp4"
+
+# 如果没有测试视频,创建一个测试模式
+if [ ! -f "$VIDEO_FILE" ]; then
+ echo "创建测试视频流..."
+ ffmpeg -f lavfi -i testsrc=duration=3600:size=640x480:rate=25 -c:v libx264 -preset ultrafast -f rtsp rtsp://localhost:8554/11
+else
+ echo "使用视频文件: $VIDEO_FILE"
+ ffmpeg -re -i "$VIDEO_FILE" -c copy -f rtsp rtsp://localhost:8554/11
+fi
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 532173d..c20c466 100644
--- a/pom.xml
+++ b/pom.xml
@@ -42,7 +42,7 @@
org.bytedeco
javacv-platform
- 1.5.9
+ 1.5.10
diff --git a/ruoyi-admin/pom.xml b/ruoyi-admin/pom.xml
index 37872e7..1450169 100644
--- a/ruoyi-admin/pom.xml
+++ b/ruoyi-admin/pom.xml
@@ -63,9 +63,8 @@
spring-context
- org.bytedeco
- javacv-platform
- 1.5.9
+ org.springframework
+ spring-context
diff --git a/ruoyi-admin/src/main/resources/application.yml b/ruoyi-admin/src/main/resources/application.yml
index e0a5952..daa28a0 100644
--- a/ruoyi-admin/src/main/resources/application.yml
+++ b/ruoyi-admin/src/main/resources/application.yml
@@ -152,3 +152,17 @@ mediasServer:
arcFace:
appId: '替换成你的appId'
sdkKey: '替换成你的sdkKey'
+
+# 视频分析配置
+video:
+ # 是否启用AI检测功能
+ detection:
+ enabled: false
+ # RTSP配置
+ rtsp:
+ # 连接超时时间(微秒)
+ timeout: 10000000
+ # 传输协议 tcp/udp
+ transport: tcp
+ # 重试次数
+ retryCount: 3
diff --git a/ruoyi-video/pom.xml b/ruoyi-video/pom.xml
index 7700158..99ffcb2 100644
--- a/ruoyi-video/pom.xml
+++ b/ruoyi-video/pom.xml
@@ -18,12 +18,7 @@
-
- org.bytedeco
- javacv-platform
- 1.5.9
-
-
+
com.ruoyi
ruoyi-common
@@ -33,20 +28,25 @@
org.projectlombok
lombok
1.18.24
+ provided
+
org.bytedeco
- javacv
- 1.5.5
+ javacv-platform
+ 1.5.10
+
+
- org.bytedeco
- ffmpeg-platform
- 4.3.2-1.5.5
+ com.fasterxml.jackson.core
+ jackson-databind
+ 2.17.1
+
cn.hutool
hutool-all
@@ -59,56 +59,20 @@
4.1.59.Final
-
+
commons-io
commons-io
2.11.0
-
+
+
commons-lang
commons-lang
2.6
-
-
-
-
-
-
-
-
-
-
-
- org.bytedeco
- javacv-platform
- 1.5.10
-
-
-
-
- com.fasterxml.jackson.core
- jackson-databind
- 2.17.1
-
-
-
- org.bytedeco
- javacv-platform
- 1.5.10
-
-
-
-
- org.bytedeco
- opencv-platform
- 4.9.0-1.5.10
-
-
-
\ No newline at end of file
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/common/ModelManager.java b/ruoyi-video/src/main/java/com/ruoyi/video/common/ModelManager.java
index 253662a..54cc4cd 100644
--- a/ruoyi-video/src/main/java/com/ruoyi/video/common/ModelManager.java
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/common/ModelManager.java
@@ -32,7 +32,8 @@ public final class ModelManager implements AutoCloseable {
int rgb = palette[i % palette.length]; i++;
int bgr = ((rgb & 0xFF) << 16) | (rgb & 0xFF00) | ((rgb >> 16) & 0xFF);
- YoloDetector det = new OpenVinoYoloDetector(name, dir, w, h, backend, bgr);
+ // 使用OnnxYoloDetector替代OpenVinoYoloDetector
+ YoloDetector det = new OnnxYoloDetector(name, dir, w, h, backend, bgr);
map.put(name, det);
}
}
@@ -44,4 +45,4 @@ public final class ModelManager implements AutoCloseable {
map.values().forEach(d -> { try { d.close(); } catch(Exception ignored){} });
map.clear();
}
-}
+}
\ No newline at end of file
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/service/ImageStoreService.java b/ruoyi-video/src/main/java/com/ruoyi/video/service/ImageStoreService.java
new file mode 100644
index 0000000..815ed0e
--- /dev/null
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/service/ImageStoreService.java
@@ -0,0 +1,17 @@
+package com.ruoyi.video.service;
+
+import org.bytedeco.opencv.opencv_core.Mat;
+
+/**
+ * 截取“叠好框的最新一帧”并存证(文件系统 / 数据库BLOB)
+ */
+public interface ImageStoreService {
+
+ /**
+ * 从指定 device 的推流实例中,读取“叠好框”的最新一帧并保存。
+ * @param deviceId 设备ID
+ * @return 文件路径,或 "db://image/{id}"
+ */
+ String saveLastAnnotatedFrame(Long deviceId);
+
+}
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/service/MediaService.java b/ruoyi-video/src/main/java/com/ruoyi/video/service/MediaService.java
index 0d9a20d..14f7c04 100644
--- a/ruoyi-video/src/main/java/com/ruoyi/video/service/MediaService.java
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/service/MediaService.java
@@ -8,9 +8,11 @@ import com.ruoyi.video.thread.MediaTransfer;
import com.ruoyi.video.thread.MediaTransferFlvByFFmpeg;
import com.ruoyi.video.thread.MediaTransferFlvByJavacv;
import io.netty.channel.ChannelHandlerContext;
-import java.util.concurrent.ConcurrentHashMap;
import org.springframework.stereotype.Service;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Consumer;
+
/**
* 媒体服务,支持全局网络超时、读写超时、无人拉流持续时长自动关闭流等配置
* @Author: orange
@@ -24,20 +26,19 @@ public class MediaService {
*/
public static ConcurrentHashMap<String, MediaTransfer> cameras = new ConcurrentHashMap<>();
-
/**
* http-flv播放
- * @param cameraDto
- * @param ctx
+ * @param cameraDto 摄像头配置
+ * @param ctx Netty上下文
*/
public void playForHttp(CameraDto cameraDto, ChannelHandlerContext ctx) {
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
- if(mediaConvert instanceof MediaTransferFlvByJavacv) {
+ if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
//如果当前已经用ffmpeg,则重新拉流
- if(cameraDto.isEnabledFFmpeg()) {
+ if (cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
this.playForHttp(cameraDto, ctx);
@@ -47,7 +48,7 @@ public class MediaService {
} else if (mediaConvert instanceof MediaTransferFlvByFFmpeg) {
MediaTransferFlvByFFmpeg mediaTransferFlvByFFmpeg = (MediaTransferFlvByFFmpeg) mediaConvert;
//如果当前已经用javacv,则关闭再重新拉流
- if(!cameraDto.isEnabledFFmpeg()) {
+ if (!cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByFFmpeg.stopFFmpeg();
cameras.remove(cameraDto.getMediaKey());
this.playForHttp(cameraDto, ctx);
@@ -57,7 +58,7 @@ public class MediaService {
}
} else {
- if(cameraDto.isEnabledFFmpeg()) {
+ if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -74,17 +75,17 @@ public class MediaService {
/**
* ws-flv播放
- * @param cameraDto
- * @param ctx
+ * @param cameraDto 摄像头配置
+ * @param ctx Netty上下文
*/
public void playForWs(CameraDto cameraDto, ChannelHandlerContext ctx) {
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
- if(mediaConvert instanceof MediaTransferFlvByJavacv) {
+ if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
//如果当前已经用ffmpeg,则重新拉流
- if(cameraDto.isEnabledFFmpeg()) {
+ if (cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
this.playForWs(cameraDto, ctx);
@@ -94,7 +95,7 @@ public class MediaService {
} else if (mediaConvert instanceof MediaTransferFlvByFFmpeg) {
MediaTransferFlvByFFmpeg mediaTransferFlvByFFmpeg = (MediaTransferFlvByFFmpeg) mediaConvert;
//如果当前已经用javacv,则关闭再重新拉流
- if(!cameraDto.isEnabledFFmpeg()) {
+ if (!cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByFFmpeg.stopFFmpeg();
cameras.remove(cameraDto.getMediaKey());
this.playForWs(cameraDto, ctx);
@@ -103,7 +104,7 @@ public class MediaService {
}
}
} else {
- if(cameraDto.isEnabledFFmpeg()) {
+ if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -119,8 +120,8 @@ public class MediaService {
/**
* api播放
- * @param cameraDto
- * @return
+ * @param cameraDto 摄像头配置
+ * @return 是否启动成功
*/
public boolean playForApi(CameraDto cameraDto) {
// 区分不同媒体
@@ -130,7 +131,7 @@ public class MediaService {
MediaTransfer mediaTransfer = cameras.get(cameraDto.getMediaKey());
if (null == mediaTransfer) {
- if(cameraDto.isEnabledFFmpeg()) {
+ if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -143,7 +144,7 @@ public class MediaService {
mediaTransfer = cameras.get(cameraDto.getMediaKey());
//同步等待
- if(mediaTransfer instanceof MediaTransferFlvByJavacv) {
+ if (mediaTransfer instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaft = (MediaTransferFlvByJavacv) mediaTransfer;
// 30秒还没true认为启动不了
for (int i = 0; i < 60; i++) {
@@ -153,6 +154,7 @@ public class MediaService {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
+ // ignore
}
}
} else if (mediaTransfer instanceof MediaTransferFlvByFFmpeg) {
@@ -165,6 +167,7 @@ public class MediaService {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
+ // ignore
}
}
}
@@ -173,14 +176,14 @@ public class MediaService {
/**
* 关闭流
- * @param cameraDto
+ * @param cameraDto 摄像头配置
*/
public void closeForApi(CameraDto cameraDto) {
cameraDto.setEnabledFlv(false);
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
- if(mediaConvert instanceof MediaTransferFlvByJavacv) {
+ if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
@@ -192,4 +195,62 @@ public class MediaService {
}
}
+ /* =========================== 新增便捷方法 =========================== */
+
+ /** 直接从缓存取 MediaTransfer(可能是 FFmpeg 或 JavaCV)。不存在返回 null。 */
+ public MediaTransfer getMedia(String mediaKey) {
+ return cameras.get(mediaKey);
+ }
+
+ /** 只取 JavaCV 实例;如果不是 JavaCV 或不存在则返回 null。 */
+ public MediaTransferFlvByJavacv getJavacv(String mediaKey) {
+ MediaTransfer mt = cameras.get(mediaKey);
+ return (mt instanceof MediaTransferFlvByJavacv) ? (MediaTransferFlvByJavacv) mt : null;
+ }
+
+ /**
+ * 取或启动 JavaCV 实例:
+ * - 已有 JavaCV:直接返回
+ * - 已有 FFmpeg:先停止 FFmpeg,再切换 JavaCV
+ * - 不存在:启动 JavaCV
+ *
+ * @param cameraDto 需包含 url / mediaKey(mediaKey 为空则用 url 的 MD5 生成)
+ * @param beforeStart 启动前对 cameraDto 做一次定制(可 null),例如 dto -> dto.setEnableDetection(true)
+ */
+    public MediaTransferFlvByJavacv getOrStartJavacv(CameraDto cameraDto, Consumer<CameraDto> beforeStart) {
+ // 兜底 mediaKey
+ if (cameraDto.getMediaKey() == null || cameraDto.getMediaKey().isEmpty()) {
+ String mediaKey = MD5.create().digestHex(cameraDto.getUrl());
+ cameraDto.setMediaKey(mediaKey);
+ }
+
+ MediaTransfer mt = cameras.get(cameraDto.getMediaKey());
+ if (mt instanceof MediaTransferFlvByJavacv) {
+ return (MediaTransferFlvByJavacv) mt;
+ }
+
+ // 若已存在 FFmpeg 实例,先停掉
+ if (mt instanceof MediaTransferFlvByFFmpeg) {
+ ((MediaTransferFlvByFFmpeg) mt).stopFFmpeg();
+ cameras.remove(cameraDto.getMediaKey());
+ }
+
+ // 启动 JavaCV
+ if (beforeStart != null) beforeStart.accept(cameraDto);
+ MediaTransferFlvByJavacv mediaConvert = new MediaTransferFlvByJavacv(cameraDto);
+ cameras.put(cameraDto.getMediaKey(), mediaConvert);
+ ThreadUtil.execute(mediaConvert);
+ return mediaConvert;
+ }
+
+ /** 可选:根据 mediaKey 强制停止并移除(两种实现都兼容) */
+ public void stopByMediaKey(String mediaKey) {
+ MediaTransfer mt = cameras.get(mediaKey);
+ if (mt instanceof MediaTransferFlvByJavacv) {
+ ((MediaTransferFlvByJavacv) mt).setRunning(false);
+ } else if (mt instanceof MediaTransferFlvByFFmpeg) {
+ ((MediaTransferFlvByFFmpeg) mt).stopFFmpeg();
+ }
+ cameras.remove(mediaKey);
+ }
}
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/FileImageStoreServiceImpl.java b/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/FileImageStoreServiceImpl.java
new file mode 100644
index 0000000..dc8a73d
--- /dev/null
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/FileImageStoreServiceImpl.java
@@ -0,0 +1,126 @@
+package com.ruoyi.video.service.impl;
+
+import com.ruoyi.common.config.RuoYiConfig;
+import com.ruoyi.common.utils.file.FileUploadUtils;
+import com.ruoyi.video.domain.Device;
+import com.ruoyi.video.service.IDeviceService;
+import com.ruoyi.video.service.ImageStoreService;
+import com.ruoyi.video.service.MediaService;
+import com.ruoyi.video.thread.MediaTransferFlvByJavacv;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.bytedeco.javacpp.BytePointer;
+import org.bytedeco.opencv.opencv_core.Mat;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+
+import static org.bytedeco.opencv.global.opencv_imgcodecs.imencode;
+
+/**
+ * 后端从视频流抓帧 -> JPEG -> 包成 MultipartFile -> 交给 FileUploadUtils.upload 存储。
+ */
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class FileImageStoreServiceImpl implements ImageStoreService {
+
+ private final IDeviceService deviceService;
+ private final MediaService mediaService;
+
+ @Override
+ public String saveLastAnnotatedFrame(Long deviceId) {
+ // 1) 定位设备 & JavaCV 实例
+ Device device = deviceService.selectDeviceByDeviceId(deviceId);
+ if (device == null) throw new IllegalArgumentException("device not found: " + deviceId);
+
+ MediaTransferFlvByJavacv mt = mediaService.getJavacv(device.getMediaKey());
+ if (mt == null) {
+ throw new IllegalStateException("media (javacv) not running for mediaKey=" + device.getMediaKey());
+ }
+
+ // 2) 取“叠好框”的最近一帧
+ Mat mat = mt.getLatestAnnotatedFrameCopy();
+ if (mat == null || mat.empty()) {
+ throw new IllegalStateException("no annotated frame available currently.");
+ }
+
+ try {
+ // 3) 编码为 JPEG 字节
+ BytePointer buf = new BytePointer();
+ if (!imencode(".jpg", mat, buf) || buf.isNull() || buf.limit() <= 0) {
+ throw new IllegalStateException("encode jpeg failed.");
+ }
+ byte[] bytes = new byte[(int) buf.limit()];
+ buf.get(bytes);
+
+ // 4) 计算保存目录:{profile}/snapshots/device-{deviceId}/
+ String profile = RuoYiConfig.getProfile();
+ String uploadBaseDir = Paths.get(profile, "snapshots", "device-" + deviceId).toString();
+
+ // 5) 生成文件名(给 MultipartFile 用;最终 FileUploadUtils 会做统一命名与日期分桶)
+ String fileName = buildFileName(deviceId);
+
+ // 6) 把字节包成 MultipartFile,走若依工具存储
+ MultipartFile multipart = new InMemoryMultipartFile(
+ "file",
+ fileName,
+ "image/jpeg",
+ bytes
+ );
+
+ String stored = FileUploadUtils.upload(uploadBaseDir, multipart);
+ log.info("snapshot saved by FileUploadUtils: {}", stored);
+ return stored; // 形如 /profile/snapshots/device-x/20250927/xxx.jpg
+ } catch (Exception e) {
+ log.error("saveLastAnnotatedFrame failed: {}", e.getMessage(), e);
+ throw new RuntimeException("save snapshot failed", e);
+ } finally {
+ try { mat.release(); } catch (Exception ignore) {}
+ }
+ }
+
+ private String buildFileName(Long deviceId) {
+ ZonedDateTime now = ZonedDateTime.now(ZoneId.systemDefault());
+ String ts = now.format(DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss_SSS"));
+ return "cam" + deviceId + "_" + ts + ".jpg";
+ }
+
+ /**
+ * 轻量内存 MultipartFile,不依赖 spring-test。
+ */
+ static class InMemoryMultipartFile implements MultipartFile {
+ private final String name;
+ private final String originalFilename;
+ private final String contentType;
+ private final byte[] content;
+
+ InMemoryMultipartFile(String name, String originalFilename, String contentType, byte[] content) {
+ this.name = name;
+ this.originalFilename = originalFilename;
+ this.contentType = contentType;
+ this.content = content != null ? content : new byte[0];
+ }
+
+ @Override public String getName() { return name; }
+ @Override public String getOriginalFilename() { return originalFilename; }
+ @Override public String getContentType() { return contentType; }
+ @Override public boolean isEmpty() { return content.length == 0; }
+ @Override public long getSize() { return content.length; }
+ @Override public byte[] getBytes() { return content; }
+ @Override public InputStream getInputStream() { return new ByteArrayInputStream(content); }
+ @Override public void transferTo(java.io.File dest) throws IOException {
+ try (var in = getInputStream(); var out = new java.io.FileOutputStream(dest)) {
+ in.transferTo(out);
+ }
+ }
+ }
+}
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/InspectionTaskServiceImpl.java b/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/InspectionTaskServiceImpl.java
index 0a2f639..7f92fdf 100644
--- a/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/InspectionTaskServiceImpl.java
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/service/impl/InspectionTaskServiceImpl.java
@@ -181,7 +181,7 @@ public class InspectionTaskServiceImpl implements InspectionTaskService {
// 初始化模型管理器
if (modelManager == null) {
modelManager = new ModelManager();
- URL json = getClass().getResource("/models/models.json");
+ URL json = getClass().getResource("/libs/models/models.json");
if (json != null) {
modelManager.load(json);
}
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/thread/MediaTransferFlvByJavacv.java b/ruoyi-video/src/main/java/com/ruoyi/video/thread/MediaTransferFlvByJavacv.java
index e76400a..53c580c 100644
--- a/ruoyi-video/src/main/java/com/ruoyi/video/thread/MediaTransferFlvByJavacv.java
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/thread/MediaTransferFlvByJavacv.java
@@ -1,51 +1,81 @@
package com.ruoyi.video.thread;
import com.ruoyi.video.common.ClientType;
-import com.ruoyi.video.common.ModelManager; // ★ 新增:多模型管理(见前面提供的类)
-import com.ruoyi.video.domain.Detection; // ★ 新增:检测结果(见前面提供的类)
+import com.ruoyi.video.common.ModelManager;
+import com.ruoyi.video.domain.Detection;
import com.ruoyi.video.domain.dto.CameraDto;
import com.ruoyi.video.service.MediaService;
-import com.ruoyi.video.thread.detector.CompositeDetector; // ★ 新增:并行多模型
-import com.ruoyi.video.thread.detector.YoloDetector; // ★ 新增:检测接口
-import com.ruoyi.video.utils.Overlay; // ★ 新增:画框工具
+import com.ruoyi.video.thread.detector.CompositeDetector;
+import com.ruoyi.video.thread.detector.YoloDetector;
+import com.ruoyi.video.utils.Overlay;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
+import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.Mat;
-import org.springframework.scheduling.annotation.Async;
+import org.springframework.util.CollectionUtils;
import java.io.ByteArrayOutputStream;
-import java.io.IOException;
import java.net.URL;
+import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
+import java.util.Collections;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
/**
- * @Author: orange
- * @CreateTime: 2025-01-16
+ * 推流(FLV) + JavaCV 解码/转码 + (可选)YOLO 检测叠框
+ * - 支持“窗口巡检”:在给定秒数内启用推理与统计,并通过 DetectionListener 回调让上层落库/告警
+ * - 播放开始可触发 10 秒试跑:attachDetectionListener(jobId, deviceId, 10, listener)
+ *
+ * 依赖:ModelManager / YoloDetector / CompositeDetector / Detection / Overlay / MediaService / CameraDto / ClientType
+ *
+ * @author orange
+ * @since 2025-01-16
*/
@Slf4j
public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable {
+ /* ===================== 内部回调/统计类型(如已外部定义,可移除) ===================== */
+
+ public interface DetectionListener {
+ /** 每次推理得到 detections 时回调(建议上层做节流) */
+        void onDetections(Long jobId, Long deviceId, List<Detection> detections, long frameTsMs);
+ /** 一个“窗口巡检”结束时回调(含统计数据) */
+ void onWindowFinished(Long jobId, Long deviceId, WindowStats stats);
+ }
+
+ @Data
+ public static class WindowStats {
+ private int frames;
+ private int detectedFrames;
+ private int objects;
+ private double maxScore;
+ private long startMs;
+ private long endMs;
+ }
+
+ /* ===================== FFmpeg/JavaCV 初始化 ===================== */
+
static {
avutil.av_log_set_level(avutil.AV_LOG_ERROR);
FFmpegLogCallback.set();
}
- /*** ====== 原有字段 ====== ***/
+ /* ===================== 原有字段 ===================== */
+
private final ConcurrentHashMap wsClients = new ConcurrentHashMap<>();
private final ConcurrentHashMap httpClients = new ConcurrentHashMap<>();
@@ -61,76 +91,170 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
private FFmpegFrameGrabber grabber; // 拉流器
private FFmpegFrameRecorder recorder; // 推流录制器
- /** true:转复用,false:转码 */
- private boolean transferFlag = false; // 默认转码
+ /** true: 转复用;false: 转码。启用检测时强制转码(要在像素上叠框) */
+ private boolean transferFlag = false;
private final CameraDto cameraDto;
private Thread listenThread;
- /*** ====== 新增:推理相关字段 ====== ***/
- // 开关:是否启用检测(可对外提供 setter)
+ /* ===================== 推理相关字段 ===================== */
+
+ // 外部开关:是否启用检测(默认启用;也可由任务/页面配置动态设置)
private boolean enableDetection = true;
- // 模型与推理
private ModelManager modelManager;
private YoloDetector detector;
- // 三线程解耦所需
+ // 解码/推理/发送解耦
private final OpenCVFrameConverter.ToMat toMat = new OpenCVFrameConverter.ToMat();
+ private final OpenCVFrameConverter.ToMat matToFrameConverter = new OpenCVFrameConverter.ToMat();
+    private final AtomicReference<Mat> latestFrame = new AtomicReference<>();
private final AtomicReference<List<Detection>> latestDetections =
new AtomicReference<>(java.util.Collections.emptyList());
+ // 窗口巡检控制
+ private volatile boolean windowMode = false;
+ private volatile long windowEndMs = 0L;
+ private Long currentJobId;
+ private Long currentDeviceId;
+ private DetectionListener detectionListener;
+ private final WindowStats stats = new WindowStats();
+
+ // 导出最近一次“叠好框的帧”用于截图存证
+    private final AtomicReference<Mat> latestAnnotatedFrame = new AtomicReference<>();
+
public MediaTransferFlvByJavacv(CameraDto cameraDto) {
super();
this.cameraDto = cameraDto;
}
+ public void setRunning(boolean running) {
+ boolean prev = this.running;
+ this.running = running;
+ // 如果是从 true -> false,则按“关闭”处理
+ if (prev && !running) {
+ try {
+ closeMedia(); // 内部会 stopWindowIfAny()、关闭连接等
+ } catch (Exception ignore) {}
+ }
+ }
+
+ /** 推荐的新接口:显式停止并释放资源 */
+ public void stop() {
+ setRunning(false);
+ }
+
+ /* ===================== 外部控制 API ===================== */
+
public boolean isRunning() { return running; }
- public void setRunning(boolean running) { this.running = running; }
public boolean isGrabberStatus() { return grabberStatus; }
- public void setGrabberStatus(boolean grabberStatus) { this.grabberStatus = grabberStatus; }
public boolean isRecorderStatus() { return recorderStatus; }
- public void setRecorderStatus(boolean recorderStatus) { this.recorderStatus = recorderStatus; }
public void setEnableDetection(boolean enable) { this.enableDetection = enable; }
- /*** ====== 推理初始化 ====== ***/
+ /**
+ * 开启一个“窗口巡检”,持续 windowSeconds 秒;期间每次推理回调 onDetections,结束时 onWindowFinished
+ */
+ public void attachDetectionListener(Long jobId, Long deviceId, int windowSeconds, DetectionListener listener) {
+ if (windowSeconds <= 0 || listener == null) return;
+ this.currentJobId = jobId;
+ this.currentDeviceId = deviceId;
+ this.detectionListener = listener;
+ this.windowMode = true;
+ long now = System.currentTimeMillis();
+ this.stats.setStartMs(now);
+ this.windowEndMs = now + windowSeconds * 1000L;
+ this.stats.setFrames(0);
+ this.stats.setDetectedFrames(0);
+ this.stats.setObjects(0);
+ this.stats.setMaxScore(0.0);
+ log.info("[job:{} device:{}] window started {}s", jobId, deviceId, windowSeconds);
+ }
+
+ /** 主动结束当前窗口(可用于任务被中断的场景) */
+ public void stopWindowIfAny() {
+ if (!windowMode) return;
+ this.windowMode = false;
+ stats.setEndMs(System.currentTimeMillis());
+ if (detectionListener != null && currentJobId != null && currentDeviceId != null) {
+ try {
+ detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats));
+ } catch (Exception ignore) {}
+ }
+ currentJobId = null;
+ currentDeviceId = null;
+ detectionListener = null;
+ log.info("window finished (stopWindowIfAny)");
+ }
+
+ /** 导出最近一次“叠好框的帧”(深拷贝),用于截图/存证。调用方负责释放 Mat */
+ public Mat getLatestAnnotatedFrameCopy() {
+ Mat src = latestAnnotatedFrame.get();
+ if (src == null || src.empty()) return null;
+ Mat copy = new Mat(src.rows(), src.cols(), src.type());
+ src.copyTo(copy);
+ return copy;
+ }
+
+ /* ===================== 初始化推理 ===================== */
+
private void initDetectors() throws Exception {
if (!enableDetection) return;
+
modelManager = new ModelManager();
URL json = getClass().getResource("/models/models.json");
modelManager.load(json);
- // 单模型: detector = modelManager.get("person-helmet");
- // 多模型并行(示例),并行度按CPU核数/模型大小调整:
+ // 你可按需切换单模型或多模型并行
+ // detector = modelManager.get("person-helmet");
detector = new CompositeDetector(
"all-models",
- java.util.List.of(modelManager.get("person-helmet"), modelManager.get("vehicle-plate")),
- 2
+ java.util.List.of(
+ modelManager.get("person-helmet"),
+ modelManager.get("vehicle-plate")
+ ),
+ 2 // 并行度
);
log.info("YOLO detectors ready: {}", detector.name());
+
+ // 预热一次,避免前几帧“无框”
+ try {
+ Frame warm = grabber != null ? grabber.grabImage() : null;
+ if (warm != null) {
+ Mat wm = toMat.convert(warm);
+ if (wm != null && !wm.empty()) {
+ long t0 = System.currentTimeMillis();
+                List<Detection> dets = detector.detect(wm);
+ long cost = System.currentTimeMillis() - t0;
+ latestDetections.set(dets);
+ log.info("Detector warm-up OK, cost={}ms, dets={}", cost,
+ CollectionUtils.isEmpty(dets) ? 0 : dets.size());
+ }
+ }
+ } catch (Throwable e) {
+ log.warn("Detector warm-up failed: {}", e.getMessage());
+ }
}
- /*** ====== 拉流器 ====== ***/
+ /* ===================== 拉流/推流 ===================== */
+
protected boolean createGrabber() {
grabber = new FFmpegFrameGrabber(cameraDto.getUrl());
- // 注意:这些是微秒字符串
String fiveSecUs = "5000000";
String oneMb = "1048576";
grabber.setOption("threads", "1");
grabber.setOption("buffer_size", oneMb);
grabber.setOption("rw_timeout", fiveSecUs);
- grabber.setOption("stimeout", fiveSecUs);
- grabber.setOption("probesize", "1048576"); // ← 修正:probesize 是“字节”
+ grabber.setOption("stimeout", fiveSecUs);
+ grabber.setOption("probesize", "1048576");
grabber.setOption("analyzeduration", fiveSecUs);
grabber.setOption("fflags", "nobuffer");
- grabber.setOption("flags", "low_delay");
- grabber.setOption("loglevel", "error"); // 稳定后压低日志
+ grabber.setOption("flags", "low_delay");
+ grabber.setOption("loglevel", "error");
if (cameraDto.getUrl().toLowerCase().startsWith("rtsp://")) {
- grabber.setOption("rtsp_transport", "tcp"); // 你要测 UDP 再改
+ grabber.setOption("rtsp_transport", "tcp");
grabber.setOption("allowed_media_types", "video");
grabber.setOption("max_delay", "500000");
grabber.setOption("user_agent", "Lavf/60");
@@ -147,19 +271,17 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
try {
grabber.start();
- log.info("\n{}\n启动拉流器成功", cameraDto.getUrl());
+ log.info("启动拉流器成功: {}", cameraDto.getUrl());
return (grabberStatus = true);
} catch (FrameGrabber.Exception e) {
MediaService.cameras.remove(cameraDto.getMediaKey());
- log.error("\n{}\n启动拉流器失败,网络超时或视频源不可用({})",
- cameraDto.getUrl(), e.getMessage());
+ log.error("启动拉流器失败: {} ({})", cameraDto.getUrl(), e.getMessage());
return (grabberStatus = false);
}
}
- /*** ====== 录制器(转码/转复用) ====== ***/
protected boolean createTransterOrRecodeRecorder() {
- // 若启用检测,必须转码(因为需要在像素上画框)
+ // 启用检测时必须转码(需要像素级叠框)
if (enableDetection) transferFlag = false;
recorder = new FFmpegFrameRecorder(bos, grabber.getImageWidth(), grabber.getImageHeight(),
@@ -167,7 +289,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
recorder.setFormat("flv");
if (!transferFlag) {
- // 转码(低延迟 H.264)
+ // 转码:低延迟 H.264
recorder.setInterleaved(false);
recorder.setVideoOption("tune", "zerolatency");
recorder.setVideoOption("preset", "ultrafast");
@@ -188,20 +310,20 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
MediaService.cameras.remove(cameraDto.getMediaKey());
}
} else {
- // 转复用(不画框时可用)
+ // 转复用(仅不叠框时)
recorder.setCloseOutputStream(false);
try {
recorder.start(grabber.getFormatContext());
return recorderStatus = true;
} catch (FrameRecorder.Exception e) {
- log.warn("\r\n{}\r\n启动转复用录制器失败,自动切换转码", cameraDto.getUrl());
+ log.warn("{} 启动转复用失败,自动切换转码", cameraDto.getUrl());
transferFlag = false;
try { recorder.stop(); } catch (FrameRecorder.Exception ignored) {}
if (createTransterOrRecodeRecorder()) {
- log.error("\r\n{}\r\n切换到转码模式", cameraDto.getUrl());
+ log.error("{} 切换到转码模式", cameraDto.getUrl());
return true;
}
- log.error("\r\n{}\r\n切换转码模式失败", cameraDto.getUrl(), e);
+ log.error("{} 切换转码模式失败", cameraDto.getUrl(), e);
}
}
return recorderStatus = false;
@@ -216,23 +338,20 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
&& (avcodec.AV_CODEC_ID_AAC == acodec || avcodec.AV_CODEC_ID_AAC_LATM == acodec);
}
- /*** ====== 主流程:转换为 FLV 并输出 ====== ***/
+ /* ===================== 主流程 ===================== */
+
protected void transferStream2Flv() {
try {
if (enableDetection) initDetectors();
} catch (Exception e) {
log.error("初始化检测模型失败:{}", e.getMessage(), e);
- // 模型失败也不中断推流,只是不画框
- enableDetection = false;
}
if (!createGrabber()) return;
- // 如果未启用检测,且编解码本身支持 FLV,可以转复用提升性能
if (!enableDetection) transferFlag = supportFlvFormatCodec();
-
if (!createTransterOrRecodeRecorder()) return;
- try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.info("清空拉流器缓存失败", e); }
+ try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.debug("flush grabber fail", e); }
if (header == null) {
header = bos.toByteArray();
bos.reset();
@@ -242,77 +361,21 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
listenClient();
long startTime = 0;
- long videoTS = 0;
-
- // === 若启用检测,启动“解码→推理→渲染”解耦线程 ===
- Thread tDecode = null, tInfer = null;
- if (enableDetection) {
- // 解码线程:仅更新 latestFrame(覆盖式,不阻塞)
- tDecode = new Thread(() -> {
- while (running && grabberStatus) {
- try {
- Frame f = grabber.grabImage();
- if (f == null) continue;
- Mat m = toMat.convert(f);
- if (m == null || m.empty()) continue;
- Mat copy = new Mat(m.rows(), m.cols(), CV_8UC3);
- m.copyTo(copy);
- Mat old = latestFrame.getAndSet(copy);
- if (old != null) old.release();
- } catch (Exception e) {
- log.debug("decode err: {}", e.getMessage());
- }
- }
- }, "det-decode");
-
- // 推理线程:限速(默认 15 FPS),更新 latestDetections
- int inferFps = 15;
- long period = 1_000_000_000L / inferFps;
- tInfer = new Thread(() -> {
- long next = System.nanoTime();
- while (running && grabberStatus) {
- long now = System.nanoTime();
- if (now < next) { LockSupport.parkNanos(next - now); continue; }
- next += period;
-
- Mat src = latestFrame.get();
- if (src == null || src.empty()) continue;
- Mat snap = new Mat(); src.copyTo(snap);
- try {
- List dets = detector.detect(snap);
- latestDetections.set(dets);
- } catch (Throwable e) {
- log.debug("infer err: {}", e.getMessage());
- } finally {
- snap.release();
- }
- }
- }, "det-infer");
-
- // 抢先预热一次,避免前几帧无框
- try {
- Frame warm = grabber.grabImage();
- if (warm != null) {
- Mat wm = toMat.convert(warm);
- if (wm != null && !wm.empty() && detector != null) {
- latestDetections.set(detector.detect(wm));
- }
- }
- } catch (Exception ignored) { }
-
- tDecode.start();
- tInfer.start();
- }
-
- // === 主发送循环(转复用/转码两种路径) ===
+ long videoTS;
+
+ // 检测频率控制变量
+ final long DETECTION_INTERVAL_MS = 3000; // 每3秒检测一次
+ long lastDetectionTime = 0;
+ List<Detection> currentDetections = Collections.emptyList(); // 当前显示的检测结果
+
for (; running && grabberStatus && recorderStatus; ) {
try {
if (transferFlag) {
- // ---- 转复用(不画框)----
+ // 仅转复用(未叠框)
long startGrab = System.currentTimeMillis();
AVPacket pkt = grabber.grabPacket();
if ((System.currentTimeMillis() - startGrab) > 5000) {
- log.info("\r\n{}\r\n视频流网络异常>>>", cameraDto.getUrl());
+ log.info("{} 网络异常(复用)", cameraDto.getUrl());
closeMedia();
break;
}
@@ -323,66 +386,227 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
recorder.recordPacket(pkt);
}
} else {
- // ---- 转码(可画框)----
+ // 转码(可叠框)
long startGrab = System.currentTimeMillis();
- Frame frame;
- if (enableDetection) {
- // 如果启用检测,解码线程已在跑;这里直接从 latestFrame 取,减少重复解码
- Mat src = latestFrame.get();
- if (src == null || src.empty()) continue;
-
- // 叠加最近一次检测结果
- Overlay.draw(latestDetections.get(), src);
- frame = toMat.convert(src);
- } else {
- // 未开启检测:直接 grab 并转码
- frame = grabber.grab();
- }
-
+ Frame frame = grabber.grab();
+
if ((System.currentTimeMillis() - startGrab) > 5000) {
- log.info("\r\n{}\r\n视频流网络异常>>>", cameraDto.getUrl());
+ log.info("{} 网络异常(转码)", cameraDto.getUrl());
closeMedia();
break;
}
+
+ if (frame != null && enableDetection) {
+ // 将Frame转换为Mat以进行处理
+ Mat mat = toMat.convert(frame);
+
+ if (mat != null && !mat.empty()) {
+ long currentTime = System.currentTimeMillis();
+
+ // 每隔DETECTION_INTERVAL_MS执行一次检测
+ if (currentTime - lastDetectionTime >= DETECTION_INTERVAL_MS) {
+ try {
+ log.debug("执行新一轮检测,上次检测时间: {}ms前",
+ currentTime - lastDetectionTime);
+
+ // 创建副本进行检测
+ Mat detectionMat = new Mat();
+ mat.copyTo(detectionMat);
+
+ // 执行检测
+ currentDetections = detector.detect(detectionMat);
+ lastDetectionTime = currentTime;
+ latestDetections.set(currentDetections);
+
+ // 释放检测Mat
+ detectionMat.release();
+
+ // 窗口巡检回调
+ if (windowMode && detectionListener != null &&
+ currentJobId != null && currentDeviceId != null) {
+ detectionListener.onDetections(currentJobId,
+ currentDeviceId,
+ currentDetections,
+ currentTime);
+ }
+
+ log.debug("检测完成,发现 {} 个目标,框将保持3秒",
+ currentDetections == null ? 0 : currentDetections.size());
+ } catch (Exception e) {
+ log.debug("检测异常: {}", e.getMessage());
+ }
+ }
+
+ // 每一帧都使用最新的检测结果绘制框
+ // 这样框会保持在原位置,直到下一次检测更新
+ if (currentDetections != null && !currentDetections.isEmpty()) {
+ try {
+ // 在当前帧上绘制检测框
+ Overlay.draw(currentDetections, mat);
+ } catch (Exception e) {
+ log.debug("绘制检测框异常: {}", e.getMessage());
+ }
+ }
+
+ // 更新"最近叠好框的帧"用于存证
+ updateLatestAnnotated(mat);
+
+ // 统计(仅窗口巡检时)
+ if (windowMode) updateStats(currentDetections);
+
+ // 窗口结束判定
+ if (windowMode && System.currentTimeMillis() >= windowEndMs) {
+ finishWindow();
+ }
+
+ // 将处理后的Mat转换回Frame
+ try {
+ // 创建新的转换器
+ OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
+ Frame processedFrame = converter.convert(mat);
+
+ if (processedFrame != null) {
+ // 使用处理后的帧替换原始帧
+ frame = processedFrame;
+ }
+ } catch (Exception e) {
+ log.debug("Mat转Frame异常: {}", e.getMessage());
+ // 如果转换失败,继续使用原始帧
+ }
+
+ // 释放Mat
+ mat.release();
+ }
+ }
+
+ // 记录帧
if (frame != null) {
- if (startTime == 0) startTime = System.currentTimeMillis();
- videoTS = 1000 * (System.currentTimeMillis() - startTime);
+ long now = System.currentTimeMillis();
+ if (startTime == 0) startTime = now;
+ videoTS = 1000 * (now - startTime);
if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
recorder.record(frame);
}
}
} catch (FrameGrabber.Exception e) {
+ log.error("拉流异常: {}", e.getMessage());
grabberStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
} catch (FrameRecorder.Exception e) {
+ log.error("推流异常: {}", e.getMessage());
recorderStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
+ } catch (Exception e) {
+ log.error("其他异常: {}", e.getMessage());
+ // 不要立即退出,尝试继续处理
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException ie) {
+ Thread.currentThread().interrupt();
+ }
}
- if (bos.size() > 0) {
- byte[] b = bos.toByteArray();
- bos.reset();
- sendFrameData(b);
+ // 输出缓存到客户端
+ try {
+ if (bos.size() > 0) {
+ byte[] b = bos.toByteArray();
+ bos.reset();
+ sendFrameData(b);
+ }
+ } catch (Exception e) {
+ log.error("发送数据异常: {}", e.getMessage());
}
}
- // === 收尾 ===
- try {
- if (detector != null) try { detector.close(); } catch (Exception ignored) {}
- if (modelManager != null) try { modelManager.close(); } catch (Exception ignored) {}
- if (recorder != null) recorder.close();
- if (grabber != null) grabber.close();
- bos.close();
- } catch (Exception ignored) {
- } finally {
- Mat m = latestFrame.getAndSet(null);
- if (m != null) m.release();
- closeMedia();
- }
- log.info("关闭媒体流-javacv,{} ", cameraDto.getUrl());
+ // 安全地关闭资源
+ safeCloseResources();
}
- /*** ====== 网络发送(原样保留) ====== ***/
+ // 将资源关闭逻辑提取到单独的方法
+ private void safeCloseResources() {
+ try {
+ if (detector != null) {
+ try { detector.close(); } catch (Exception ignored) {}
+ }
+
+ if (modelManager != null) {
+ try { modelManager.close(); } catch (Exception ignored) {}
+ }
+
+ if (recorder != null) {
+ try { recorder.close(); } catch (Exception ignored) {}
+ }
+
+ if (grabber != null) {
+ try { grabber.close(); } catch (Exception ignored) {}
+ }
+
+ try { bos.close(); } catch (Exception ignored) {}
+
+ Mat m = latestFrame.getAndSet(null);
+ if (m != null) {
+ try { m.release(); } catch (Exception ignored) {}
+ }
+
+ Mat a = latestAnnotatedFrame.getAndSet(null);
+ if (a != null) {
+ try { a.release(); } catch (Exception ignored) {}
+ }
+
+ closeMedia();
+ } catch (Exception e) {
+ log.error("关闭资源异常: {}", e.getMessage());
+ }
+
+ log.info("关闭媒体流-javacv: {}", cameraDto.getUrl());
+ }
+
+ /* ===================== 统计 / 窗口结束 ===================== */
+
+ private void updateStats(List<Detection> dets) {
+ stats.setFrames(stats.getFrames() + 1);
+ if (dets != null && !dets.isEmpty()) {
+ stats.setDetectedFrames(stats.getDetectedFrames() + 1);
+ stats.setObjects(stats.getObjects() + dets.size());
+ double localMax = dets.stream().mapToDouble(Detection::conf).max().orElse(0.0);
+ if (localMax > stats.getMaxScore()) stats.setMaxScore(localMax);
+ }
+ }
+
+ private void finishWindow() {
+ windowMode = false;
+ stats.setEndMs(System.currentTimeMillis());
+ if (detectionListener != null && currentJobId != null && currentDeviceId != null) {
+ try { detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats)); }
+ catch (Exception ignore) {}
+ }
+ currentJobId = null;
+ currentDeviceId = null;
+ detectionListener = null;
+ log.info("window finished (timeout)");
+ }
+
+ private static WindowStats cloneStats(WindowStats s) {
+ WindowStats c = new WindowStats();
+ c.setFrames(s.getFrames());
+ c.setDetectedFrames(s.getDetectedFrames());
+ c.setObjects(s.getObjects());
+ c.setMaxScore(s.getMaxScore());
+ c.setStartMs(s.getStartMs());
+ c.setEndMs(s.getEndMs());
+ return c;
+ }
+
+ private void updateLatestAnnotated(Mat src) {
+ if (src == null || src.empty()) return;
+ Mat copy = new Mat(src.rows(), src.cols(), src.type());
+ src.copyTo(copy);
+ Mat old = latestAnnotatedFrame.getAndSet(copy);
+ if (old != null) old.release();
+ }
+
+ /* ===================== 网络发送/连接管理 ===================== */
+
private void sendFrameData(byte[] data) {
// ws
for (Map.Entry entry : wsClients.entrySet()) {
@@ -396,7 +620,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
} catch (Exception e) {
wsClients.remove(entry.getKey());
hasClient();
- e.printStackTrace();
+ log.debug("ws send err", e);
}
}
// http
@@ -411,7 +635,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
} catch (Exception e) {
httpClients.remove(entry.getKey());
hasClient();
- e.printStackTrace();
+ log.debug("http send err", e);
}
}
}
@@ -422,7 +646,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
if (hcSize != newHcSize || wcSize != newWcSize) {
hcSize = newHcSize;
wcSize = newWcSize;
- log.info("\r\n{}\r\nhttp连接数:{}, ws连接数:{} \r\n", cameraDto.getUrl(), newHcSize, newWcSize);
+ log.info("{} http连接数:{}, ws连接数:{}", cameraDto.getUrl(), newHcSize, newWcSize);
}
if (!cameraDto.isAutoClose()) return;
@@ -448,15 +672,16 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
}
private void closeMedia() {
+ // 结束窗口(如果还在)
+ stopWindowIfAny();
+
running = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
for (Map.Entry entry : wsClients.entrySet()) {
- try { entry.getValue().close(); } catch (Exception ignored) {}
- finally { wsClients.remove(entry.getKey()); }
+ try { entry.getValue().close(); } catch (Exception ignored) {} finally { wsClients.remove(entry.getKey()); }
}
for (Map.Entry entry : httpClients.entrySet()) {
- try { entry.getValue().close(); } catch (Exception ignored) {}
- finally { httpClients.remove(entry.getKey()); }
+ try { entry.getValue().close(); } catch (Exception ignored) {} finally { httpClients.remove(entry.getKey()); }
}
}
@@ -491,7 +716,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
}
}
} catch (Exception e) {
- e.printStackTrace();
+ log.debug("send header err", e);
}
break;
}
@@ -499,10 +724,13 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
timeout += 50;
if (timeout > 30000) break;
} catch (Exception e) {
- e.printStackTrace();
+ log.debug("addClient err", e);
}
}
}
- @Override public void run() { transferStream2Flv(); }
+ @Override
+ public void run() {
+ transferStream2Flv();
+ }
}
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/DummyDetector.java b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/DummyDetector.java
new file mode 100644
index 0000000..7f771b7
--- /dev/null
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/DummyDetector.java
@@ -0,0 +1,29 @@
+package com.ruoyi.video.thread.detector;
+
+import com.ruoyi.video.domain.Detection;
+import org.bytedeco.opencv.opencv_core.*;
+
+import java.util.*;
+
+/**
+ * 空检测器,用于在模型加载失败时提供回退机制
+ */
+public class DummyDetector implements YoloDetector {
+ private final String name;
+
+ public DummyDetector(String name) {
+ this.name = name;
+ System.out.println("警告: 使用DummyDetector替代真实模型 - " + name);
+ }
+
+ @Override
+ public String name() {
+ return name;
+ }
+
+ @Override
+ public List<Detection> detect(Mat bgr) {
+ // 返回空列表,不进行实际检测
+ return Collections.emptyList();
+ }
+}
\ No newline at end of file
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OnnxYoloDetector.java b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OnnxYoloDetector.java
new file mode 100644
index 0000000..44156ed
--- /dev/null
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OnnxYoloDetector.java
@@ -0,0 +1,249 @@
+package com.ruoyi.video.thread.detector;
+
+import com.ruoyi.video.domain.Detection;
+import org.bytedeco.javacpp.indexer.FloatRawIndexer;
+import org.bytedeco.opencv.opencv_core.*;
+import org.bytedeco.opencv.opencv_dnn.Net;
+
+import java.nio.file.*;
+import java.util.*;
+
+import static org.bytedeco.opencv.global.opencv_dnn.*;
+import static org.bytedeco.opencv.global.opencv_core.*;
+import static org.bytedeco.opencv.global.opencv_imgproc.*;
+
+public final class OnnxYoloDetector implements YoloDetector {
+ private final String modelName;
+ private final Net net;
+ private final Size input;
+ private final float confTh = 0.25f, nmsTh = 0.45f;
+ private final String[] classes;
+ private final int colorBGR;
+
+ public OnnxYoloDetector(String name, Path dir, int inW, int inH, String backend, int colorBGR) throws Exception {
+ this.modelName = name;
+ this.input = new Size(inW, inH);
+ this.colorBGR = colorBGR;
+
+ // 查找ONNX模型文件
+ String onnx = findModelFile(dir, ".onnx");
+ if (onnx == null) {
+ throw new Exception("找不到ONNX模型文件,请确保目录中存在 .onnx 文件: " + dir);
+ }
+
+ // 读取类别文件
+ Path clsPath = dir.resolve("classes.txt");
+ if (Files.exists(clsPath)) {
+ this.classes = Files.readAllLines(clsPath).stream().map(String::trim)
+ .filter(s -> !s.isEmpty()).toArray(String[]::new);
+ } else {
+ this.classes = new String[0];
+ }
+
+ try {
+ // 加载ONNX模型
+ this.net = readNetFromONNX(onnx);
+
+ // 设置OpenCV后端
+ net.setPreferableBackend(DNN_BACKEND_OPENCV);
+ net.setPreferableTarget(DNN_TARGET_CPU);
+
+ System.out.println("ONNX模型加载成功: " + name + " (" + onnx + ")");
+
+ } catch (Exception e) {
+ throw new Exception("模型加载失败: " + e.getMessage() +
+ "\n请确保ONNX模型文件格式正确", e);
+ }
+ }
+
+ /**
+ * 在目录中查找指定扩展名的模型文件
+ */
+ private String findModelFile(Path dir, String extension) {
+ try {
+ return Files.list(dir)
+ .filter(path -> path.toString().toLowerCase().endsWith(extension.toLowerCase()))
+ .map(Path::toString)
+ .findFirst()
+ .orElse(null);
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+ @Override public String name() { return modelName; }
+
+ @Override
+ public List<Detection> detect(Mat bgr) {
+ if (bgr == null || bgr.empty()) return Collections.emptyList();
+
+ // 统一成 BGR 3 通道,避免 blobFromImage 断言失败
+ if (bgr.channels() != 3) {
+ Mat tmp = new Mat();
+ if (bgr.channels() == 1) cvtColor(bgr, tmp, COLOR_GRAY2BGR);
+ else if (bgr.channels() == 4) cvtColor(bgr, tmp, COLOR_BGRA2BGR);
+ else bgr.copyTo(tmp);
+ bgr = tmp;
+ }
+
+ try (Mat blob = blobFromImage(bgr, 1.0/255.0, input, new Scalar(0.0), true, false, CV_32F)) {
+ net.setInput(blob);
+ // ===== 多输出兼容(Bytedeco 正确写法)=====
+ org.bytedeco.opencv.opencv_core.StringVector outNames = net.getUnconnectedOutLayersNames();
+ List<Mat> outs = new ArrayList<>();
+
+ if (outNames == null || outNames.size() == 0) {
+ // 只有一个默认输出
+ Mat out = net.forward(); // ← 直接返回 Mat
+ outs.add(out);
+ } else {
+ // 多输出:用 MatVector 承接
+ org.bytedeco.opencv.opencv_core.MatVector outBlobs =
+ new org.bytedeco.opencv.opencv_core.MatVector(outNames.size());
+ net.forward(outBlobs, outNames); // ← 正确的重载
+
+ for (long i = 0; i < outBlobs.size(); i++) {
+ outs.add(outBlobs.get(i));
+ }
+ }
+
+ int fw = bgr.cols(), fh = bgr.rows();
+ List<Rect2d> boxes = new ArrayList<>();
+ List<Float> scores = new ArrayList<>();
+ List<Integer> classIds = new ArrayList<>();
+
+ for (Mat out : outs) {
+ parseYoloOutput(out, fw, fh, boxes, scores, classIds);
+ }
+ if (boxes.isEmpty()) return Collections.emptyList();
+
+ // 纯 Java NMS,避免 MatOf* / Vector API 兼容问题
+ List<Integer> keep = nmsIndices(boxes, scores, nmsTh);
+
+ List<Detection> result = new ArrayList<>(keep.size());
+ for (int k : keep) {
+ Rect2d r = boxes.get(k);
+ Rect rect = new Rect((int)r.x(), (int)r.y(), (int)r.width(), (int)r.height());
+ int cid = classIds.get(k);
+ String cname = (cid >= 0 && cid < classes.length) ? classes[cid] : ("cls"+cid);
+ result.add(new Detection("["+modelName+"] "+cname, scores.get(k), rect, colorBGR));
+ }
+ return result;
+ } catch (Throwable e) {
+ // 单帧失败不影响整体
+ return Collections.emptyList();
+ }
+ }
+
+ /** 解析 YOLO-IR 输出为 N×C(C>=6),并填充 boxes/scores/classIds。 */
+ private void parseYoloOutput(Mat out, int fw, int fh,
+ List<Rect2d> boxes, List<Float> scores, List<Integer> classIds) {
+ int dims = out.dims();
+ Mat m;
+
+ if (dims == 2) {
+ // NxC 或 CxN
+ if (out.cols() >= 6) {
+ m = out;
+ } else {
+ Mat tmp = new Mat();
+ transpose(out, tmp); // CxN -> NxC
+ m = tmp;
+ }
+ } else if (dims == 3) {
+ // [1,N,C] 或 [1,C,N]
+ if (out.size(2) >= 6) {
+ m = out.reshape(1, out.size(1)); // -> N×C
+ } else {
+ Mat squeezed = out.reshape(1, out.size(1)); // C×N
+ Mat tmp = new Mat();
+ transpose(squeezed, tmp); // -> N×C
+ m = tmp;
+ }
+ } else if (dims == 4) {
+ // [1,1,N,C] 或 [1,1,C,N]
+ int a = out.size(2), b = out.size(3);
+ if (b >= 6) {
+ m = out.reshape(1, a).clone(); // -> N×C
+ } else {
+ Mat cxn = out.reshape(1, b); // C×N
+ Mat tmp = new Mat();
+ transpose(cxn, tmp); // -> N×C
+ m = tmp.clone();
+ }
+ } else {
+ return; // 不支持的形状
+ }
+
+ int N = m.rows(), C = m.cols();
+ if (C < 6 || N <= 0) return;
+
+ FloatRawIndexer idx = m.createIndexer();
+ for (int i = 0; i < N; i++) {
+ float cx = idx.get(i,0), cy = idx.get(i,1), w = idx.get(i,2), h = idx.get(i,3);
+ float obj = idx.get(i,4);
+
+ int bestCls = -1; float bestScore = 0f;
+ for (int c = 5; c < C; c++) {
+ float p = idx.get(i,c);
+ if (p > bestScore) { bestScore = p; bestCls = c - 5; }
+ }
+ float conf = obj * bestScore;
+ if (conf < confTh) continue;
+
+ // 默认假设归一化中心点格式 (cx,cy,w,h);若你的 IR 是 x1,y1,x2,y2,请把这里换算改掉
+ int bx = Math.max(0, Math.round(cx * fw - (w * fw) / 2f));
+ int by = Math.max(0, Math.round(cy * fh - (h * fh) / 2f));
+ int bw = Math.min(fw - bx, Math.round(w * fw));
+ int bh = Math.min(fh - by, Math.round(h * fh));
+ if (bw <= 0 || bh <= 0) continue;
+
+ boxes.add(new Rect2d(bx, by, bw, bh));
+ scores.add(conf);
+ classIds.add(bestCls);
+ }
+ }
+
+ /** 纯 Java NMS(IoU 抑制),返回保留的下标列表。 */
+ private List<Integer> nmsIndices(List<Rect2d> boxes, List<Float> scores, float nmsThreshold) {
+ List<Integer> order = new ArrayList<>(boxes.size());
+ for (int i = 0; i < boxes.size(); i++) order.add(i);
+ // 按分数降序
+ order.sort((i, j) -> Float.compare(scores.get(j), scores.get(i)));
+
+ List<Integer> keep = new ArrayList<>();
+ boolean[] removed = new boolean[boxes.size()];
+
+ for (int a = 0; a < order.size(); a++) {
+ int i = order.get(a);
+ if (removed[i]) continue;
+ keep.add(i);
+
+ Rect2d bi = boxes.get(i);
+ double areaI = bi.width() * bi.height();
+
+ for (int b = a + 1; b < order.size(); b++) {
+ int j = order.get(b);
+ if (removed[j]) continue;
+
+ Rect2d bj = boxes.get(j);
+ double areaJ = bj.width() * bj.height();
+
+ double xx1 = Math.max(bi.x(), bj.x());
+ double yy1 = Math.max(bi.y(), bj.y());
+ double xx2 = Math.min(bi.x() + bi.width(), bj.x() + bj.width());
+ double yy2 = Math.min(bi.y() + bi.height(), bj.y() + bj.height());
+
+ double w = Math.max(0, xx2 - xx1);
+ double h = Math.max(0, yy2 - yy1);
+ double inter = w * h;
+ double iou = inter / (areaI + areaJ - inter + 1e-9);
+
+ if (iou > nmsThreshold) removed[j] = true;
+ }
+ }
+ return keep;
+ }
+
+ @Override public void close(){ net.close(); }
+}
\ No newline at end of file
diff --git a/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OpenVinoYoloDetector.java b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OpenVinoYoloDetector.java
index c7ea03e..855e61b 100644
--- a/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OpenVinoYoloDetector.java
+++ b/ruoyi-video/src/main/java/com/ruoyi/video/thread/detector/OpenVinoYoloDetector.java
@@ -25,9 +25,15 @@ public final class OpenVinoYoloDetector implements YoloDetector {
this.input = new Size(inW, inH);
this.colorBGR = colorBGR;
- String xml = dir.resolve("model.xml").toString();
- String bin = dir.resolve("model.bin").toString();
+ // 自动查找模型文件
+ String xml = findModelFile(dir, ".xml");
+ String bin = findModelFile(dir, ".bin");
+ if (xml == null || bin == null) {
+ throw new Exception("找不到模型文件,请确保目录中存在 .xml 和 .bin 文件: " + dir);
+ }
+
+ // 读取类别文件
Path clsPath = dir.resolve("classes.txt");
if (Files.exists(clsPath)) {
this.classes = Files.readAllLines(clsPath).stream().map(String::trim)
@@ -36,19 +42,34 @@ public final class OpenVinoYoloDetector implements YoloDetector {
this.classes = new String[0];
}
- this.net = readNetFromModelOptimizer(xml, bin);
-
- boolean set = false;
- if ("openvino".equalsIgnoreCase(backend)) {
- try {
- net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
- net.setPreferableTarget(DNN_TARGET_CPU);
- set = true;
- } catch (Throwable ignore) { /* 回退 */ }
- }
- if (!set) {
+ try {
+ // 加载模型,但强制使用OpenCV后端
+ this.net = readNetFromModelOptimizer(xml, bin);
+
+ // 强制使用OpenCV后端,避免OpenVINO依赖
net.setPreferableBackend(DNN_BACKEND_OPENCV);
net.setPreferableTarget(DNN_TARGET_CPU);
+
+ System.out.println("模型加载成功: " + name + " (使用OpenCV后端)");
+
+ } catch (Exception e) {
+ throw new Exception("模型加载失败: " + e.getMessage() +
+ "\n请确保模型文件完整且格式正确", e);
+ }
+ }
+
+ /**
+ * 在目录中查找指定扩展名的模型文件
+ */
+ private String findModelFile(Path dir, String extension) {
+ try {
+ return Files.list(dir)
+ .filter(path -> path.toString().toLowerCase().endsWith(extension.toLowerCase()))
+ .map(Path::toString)
+ .findFirst()
+ .orElse(null);
+ } catch (Exception e) {
+ return null;
}
}
@@ -227,4 +248,4 @@ public final class OpenVinoYoloDetector implements YoloDetector {
}
@Override public void close(){ net.close(); }
-}
+}
\ No newline at end of file
diff --git a/ruoyi-video/src/main/resources/libs/models/garbage/best.onnx b/ruoyi-video/src/main/resources/libs/models/garbage/best.onnx
new file mode 100644
index 0000000..26951a1
Binary files /dev/null and b/ruoyi-video/src/main/resources/libs/models/garbage/best.onnx differ
diff --git a/ruoyi-video/src/main/resources/libs/models/models.json b/ruoyi-video/src/main/resources/libs/models/models.json
index 8a19a3c..38e5aa4 100644
--- a/ruoyi-video/src/main/resources/libs/models/models.json
+++ b/ruoyi-video/src/main/resources/libs/models/models.json
@@ -1,4 +1,4 @@
[
- {"name":"smoke","path":"models/smoke","size":[640,640],"backend":"openvino"},
- {"name":"garbage","path":"models/garbage","size":[640,640],"backend":"openvino"}
+ {"name":"smoke","path":"libs/models/smoke","size":[640,640],"backend":"opencv"},
+ {"name":"garbage","path":"libs/models/garbage","size":[640,640],"backend":"opencv"}
]
diff --git a/ruoyi-video/src/main/resources/libs/models/smoke/smoke.onnx b/ruoyi-video/src/main/resources/libs/models/smoke/smoke.onnx
new file mode 100644
index 0000000..d24a0c4
Binary files /dev/null and b/ruoyi-video/src/main/resources/libs/models/smoke/smoke.onnx differ
diff --git a/ruoyi-video/src/main/resources/models/detection-config.json b/ruoyi-video/src/main/resources/models/detection-config.json
new file mode 100644
index 0000000..9ab8e10
--- /dev/null
+++ b/ruoyi-video/src/main/resources/models/detection-config.json
@@ -0,0 +1,33 @@
+{
+ "models": [
+ {
+ "name": "yolo",
+ "type": "object_detection",
+ "modelPath": "models/yolo.onnx",
+ "configPath": "models/yolo.cfg",
+ "weightsPath": "models/yolo.weights",
+ "classNames": [
+ "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck",
+ "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench",
+ "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra",
+ "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
+ "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
+ "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup",
+ "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange",
+ "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "sofa",
+ "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse",
+ "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
+ "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier",
+ "toothbrush"
+ ],
+ "inputSize": {
+ "width": 640,
+ "height": 640
+ },
+ "threshold": 0.5,
+ "nmsThreshold": 0.4
+ }
+ ],
+ "enabled": true,
+ "defaultModel": "yolo"
+}
\ No newline at end of file