This commit is contained in:
砂糖
2025-09-28 15:22:23 +08:00
19 changed files with 1038 additions and 252 deletions

20
bin/start-rtsp-server.bat Normal file
View File

@@ -0,0 +1,20 @@
@echo off
echo 启动RTSP测试服务器...
echo.
echo 请确保已安装FFmpeg
echo.
REM Push a test stream to a local RTSP server using FFmpeg.
REM Verify FFmpeg is on PATH first (mirrors the check in start-rtsp-server.sh).
where ffmpeg >nul 2>nul
if errorlevel 1 (
    echo FFmpeg not found in PATH.
    pause
    exit /b 1
)
REM VIDEO_FILE may be replaced with the path to a real video file.
set VIDEO_FILE=test.mp4
REM If no test video exists, generate a synthetic test pattern instead.
if not exist "%VIDEO_FILE%" (
    echo 创建测试视频流...
    REM -c:v libx264 -preset ultrafast keeps this consistent with start-rtsp-server.sh.
    ffmpeg -f lavfi -i testsrc=duration=3600:size=640x480:rate=25 -c:v libx264 -preset ultrafast -f rtsp rtsp://localhost:8554/11
) else (
    echo 使用视频文件: %VIDEO_FILE%
    ffmpeg -re -i "%VIDEO_FILE%" -c copy -f rtsp rtsp://localhost:8554/11
)
pause

24
bin/start-rtsp-server.sh Normal file
View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Start a local RTSP test stream with FFmpeg: pushes either a real video file
# (test.mp4) or a synthetic test pattern to rtsp://localhost:8554/11.
set -euo pipefail

echo "启动RTSP测试服务器..."
echo

# Verify FFmpeg is installed before attempting to stream.
if ! command -v ffmpeg >/dev/null 2>&1; then
  # Diagnostics belong on stderr, not stdout.
  echo "错误: FFmpeg未安装请先安装FFmpeg" >&2
  echo "Ubuntu/Debian: sudo apt install ffmpeg" >&2
  echo "CentOS/RHEL: sudo yum install ffmpeg" >&2
  exit 1
fi

# Path of the video file to stream, relative to the working directory.
VIDEO_FILE="test.mp4"

# Fall back to a generated test pattern when the file is absent.
if [ ! -f "$VIDEO_FILE" ]; then
  echo "创建测试视频流..."
  ffmpeg -f lavfi -i testsrc=duration=3600:size=640x480:rate=25 -c:v libx264 -preset ultrafast -f rtsp rtsp://localhost:8554/11
else
  echo "使用视频文件: $VIDEO_FILE"
  ffmpeg -re -i "$VIDEO_FILE" -c copy -f rtsp rtsp://localhost:8554/11
fi

View File

@@ -42,7 +42,7 @@
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.9</version> <!-- 版本号需与项目兼容 -->
<version>1.5.10</version> <!-- 版本号需与项目兼容 -->
</dependency>
<!-- SpringBoot的依赖配置-->
<dependency>

View File

@@ -63,9 +63,8 @@
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.9</version> <!-- 版本号需与项目兼容 -->
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
</dependencies>

View File

@@ -152,3 +152,17 @@ mediasServer:
arcFace:
appId: '替换成你的appId'
sdkKey: '替换成你的sdkKey'
# 视频分析配置
video:
# 是否启用AI检测功能
detection:
enabled: false
# RTSP配置
rtsp:
# 连接超时时间(微秒)
timeout: 10000000
# 传输协议 tcp/udp
transport: tcp
# 重试次数
retryCount: 3

View File

@@ -18,12 +18,7 @@
</properties>
<dependencies>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.9</version> <!-- 版本号需与项目兼容 -->
</dependency>
<!-- 通用工具-->
<!-- 通用工具 -->
<dependency>
<groupId>com.ruoyi</groupId>
<artifactId>ruoyi-common</artifactId>
@@ -33,20 +28,25 @@
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.24</version>
<scope>provided</scope>
</dependency>
<!-- 包含 OpenCV/FFmpeg 等全部平台原生库win/linux/mac -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv</artifactId>
<version>1.5.5</version>
<artifactId>javacv-platform</artifactId>
<version>1.5.10</version>
</dependency>
<!-- 解析 models.json-->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>ffmpeg-platform</artifactId>
<version>4.3.2-1.5.5</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.1</version>
</dependency>
<!-- 你的其它业务依赖:保持不变 -->
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
@@ -59,56 +59,20 @@
<version>4.1.59.Final</version>
</dependency>
<!-- commons-io-->
<!-- commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.11.0</version>
</dependency>
<!-- commons-lang-->
<!-- commons-lang注意这是 2.x 老版,如果无强依赖可考虑迁移到 commons-lang3 -->
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<!-- &lt;!&ndash;加载本地jar包&ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>sunjce_provider</groupId>-->
<!-- <artifactId>sunjce_provider</artifactId>-->
<!-- <version>0.0.1</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${project.basedir}/src/main/resources/libs/arcsoft-sdk-face-3.0.0.0.jar-->
<!-- </systemPath>-->
<!-- </dependency>-->
<!-- 一行解决:包含 FFmpeg/OpenCV 等全部平台原生库 -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.10</version>
</dependency>
<!-- 解析 models.json 用(如果你按我给的多模型配置走) -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.1</version>
</dependency>
<!-- JavaCV 封装 -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.10</version>
</dependency>
<!-- OpenCV 平台包(含 DNN 模块) -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>opencv-platform</artifactId>
<version>4.9.0-1.5.10</version>
</dependency>
</dependencies>
</project>

View File

@@ -32,7 +32,8 @@ public final class ModelManager implements AutoCloseable {
int rgb = palette[i % palette.length]; i++;
int bgr = ((rgb & 0xFF) << 16) | (rgb & 0xFF00) | ((rgb >> 16) & 0xFF);
YoloDetector det = new OpenVinoYoloDetector(name, dir, w, h, backend, bgr);
// 使用OnnxYoloDetector替代OpenVinoYoloDetector
YoloDetector det = new OnnxYoloDetector(name, dir, w, h, backend, bgr);
map.put(name, det);
}
}
@@ -44,4 +45,4 @@ public final class ModelManager implements AutoCloseable {
map.values().forEach(d -> { try { d.close(); } catch(Exception ignored){} });
map.clear();
}
}
}

View File

@@ -0,0 +1,17 @@
package com.ruoyi.video.service;
import org.bytedeco.opencv.opencv_core.Mat;
/**
 * Captures the latest annotated frame (detection boxes already drawn on it)
 * from a device's stream and persists it (file system or database BLOB).
 */
public interface ImageStoreService {
/**
 * Reads the latest annotated frame from the streaming instance of the given
 * device and saves it.
 * @param deviceId device ID
 * @return stored file path, or "db://image/{id}"
 */
String saveLastAnnotatedFrame(Long deviceId);
}

View File

@@ -8,9 +8,11 @@ import com.ruoyi.video.thread.MediaTransfer;
import com.ruoyi.video.thread.MediaTransferFlvByFFmpeg;
import com.ruoyi.video.thread.MediaTransferFlvByJavacv;
import io.netty.channel.ChannelHandlerContext;
import java.util.concurrent.ConcurrentHashMap;
import org.springframework.stereotype.Service;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
/**
* 媒体服务,支持全局网络超时、读写超时、无人拉流持续时长自动关闭流等配置
* @Author: orange
@@ -24,20 +26,19 @@ public class MediaService {
*/
public static ConcurrentHashMap<String, MediaTransfer> cameras = new ConcurrentHashMap<>();
/**
* http-flv播放
* @param cameraDto
* @param ctx
* @param cameraDto 摄像头配置
* @param ctx Netty上下文
*/
public void playForHttp(CameraDto cameraDto, ChannelHandlerContext ctx) {
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
if(mediaConvert instanceof MediaTransferFlvByJavacv) {
if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
//如果当前已经用ffmpeg则重新拉流
if(cameraDto.isEnabledFFmpeg()) {
if (cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
this.playForHttp(cameraDto, ctx);
@@ -47,7 +48,7 @@ public class MediaService {
} else if (mediaConvert instanceof MediaTransferFlvByFFmpeg) {
MediaTransferFlvByFFmpeg mediaTransferFlvByFFmpeg = (MediaTransferFlvByFFmpeg) mediaConvert;
//如果当前已经用javacv则关闭再重新拉流
if(!cameraDto.isEnabledFFmpeg()) {
if (!cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByFFmpeg.stopFFmpeg();
cameras.remove(cameraDto.getMediaKey());
this.playForHttp(cameraDto, ctx);
@@ -57,7 +58,7 @@ public class MediaService {
}
} else {
if(cameraDto.isEnabledFFmpeg()) {
if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -74,17 +75,17 @@ public class MediaService {
/**
* ws-flv播放
* @param cameraDto
* @param ctx
* @param cameraDto 摄像头配置
* @param ctx Netty上下文
*/
public void playForWs(CameraDto cameraDto, ChannelHandlerContext ctx) {
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
if(mediaConvert instanceof MediaTransferFlvByJavacv) {
if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
//如果当前已经用ffmpeg则重新拉流
if(cameraDto.isEnabledFFmpeg()) {
if (cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
this.playForWs(cameraDto, ctx);
@@ -94,7 +95,7 @@ public class MediaService {
} else if (mediaConvert instanceof MediaTransferFlvByFFmpeg) {
MediaTransferFlvByFFmpeg mediaTransferFlvByFFmpeg = (MediaTransferFlvByFFmpeg) mediaConvert;
//如果当前已经用javacv则关闭再重新拉流
if(!cameraDto.isEnabledFFmpeg()) {
if (!cameraDto.isEnabledFFmpeg()) {
mediaTransferFlvByFFmpeg.stopFFmpeg();
cameras.remove(cameraDto.getMediaKey());
this.playForWs(cameraDto, ctx);
@@ -103,7 +104,7 @@ public class MediaService {
}
}
} else {
if(cameraDto.isEnabledFFmpeg()) {
if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -119,8 +120,8 @@ public class MediaService {
/**
* api播放
* @param cameraDto
* @return
* @param cameraDto 摄像头配置
* @return 是否启动成功
*/
public boolean playForApi(CameraDto cameraDto) {
// 区分不同媒体
@@ -130,7 +131,7 @@ public class MediaService {
MediaTransfer mediaTransfer = cameras.get(cameraDto.getMediaKey());
if (null == mediaTransfer) {
if(cameraDto.isEnabledFFmpeg()) {
if (cameraDto.isEnabledFFmpeg()) {
MediaTransferFlvByFFmpeg mediaft = new MediaTransferFlvByFFmpeg(cameraDto);
mediaft.execute();
cameras.put(cameraDto.getMediaKey(), mediaft);
@@ -143,7 +144,7 @@ public class MediaService {
mediaTransfer = cameras.get(cameraDto.getMediaKey());
//同步等待
if(mediaTransfer instanceof MediaTransferFlvByJavacv) {
if (mediaTransfer instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaft = (MediaTransferFlvByJavacv) mediaTransfer;
// 30秒还没true认为启动不了
for (int i = 0; i < 60; i++) {
@@ -153,6 +154,7 @@ public class MediaService {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
}
} else if (mediaTransfer instanceof MediaTransferFlvByFFmpeg) {
@@ -165,6 +167,7 @@ public class MediaService {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
}
}
@@ -173,14 +176,14 @@ public class MediaService {
/**
* 关闭流
* @param cameraDto
* @param cameraDto 摄像头配置
*/
public void closeForApi(CameraDto cameraDto) {
cameraDto.setEnabledFlv(false);
if (cameras.containsKey(cameraDto.getMediaKey())) {
MediaTransfer mediaConvert = cameras.get(cameraDto.getMediaKey());
if(mediaConvert instanceof MediaTransferFlvByJavacv) {
if (mediaConvert instanceof MediaTransferFlvByJavacv) {
MediaTransferFlvByJavacv mediaTransferFlvByJavacv = (MediaTransferFlvByJavacv) mediaConvert;
mediaTransferFlvByJavacv.setRunning(false);
cameras.remove(cameraDto.getMediaKey());
@@ -192,4 +195,62 @@ public class MediaService {
}
}
/* =========================== Convenience accessors =========================== */
/** Fetches the MediaTransfer (FFmpeg- or JavaCV-backed) straight from the cache; returns null when absent. */
public MediaTransfer getMedia(String mediaKey) {
return cameras.get(mediaKey);
}
/** Returns the JavaCV instance only; null when the key is missing or maps to a non-JavaCV transfer. */
public MediaTransferFlvByJavacv getJavacv(String mediaKey) {
MediaTransfer mt = cameras.get(mediaKey);
return (mt instanceof MediaTransferFlvByJavacv) ? (MediaTransferFlvByJavacv) mt : null;
}
/**
 * Gets or starts a JavaCV transfer instance:
 * - an existing JavaCV instance is returned as-is
 * - an existing FFmpeg instance is stopped first, then replaced with JavaCV
 * - otherwise a new JavaCV instance is started
 *
 * @param cameraDto must carry url / mediaKey (when mediaKey is empty it is derived from the MD5 of the url)
 * @param beforeStart one-shot customization of cameraDto before start (nullable), e.g. dto -> dto.setEnableDetection(true)
 */
public MediaTransferFlvByJavacv getOrStartJavacv(CameraDto cameraDto, Consumer<CameraDto> beforeStart) {
// Fallback mediaKey derived from the stream URL.
if (cameraDto.getMediaKey() == null || cameraDto.getMediaKey().isEmpty()) {
String mediaKey = MD5.create().digestHex(cameraDto.getUrl());
cameraDto.setMediaKey(mediaKey);
}
MediaTransfer mt = cameras.get(cameraDto.getMediaKey());
if (mt instanceof MediaTransferFlvByJavacv) {
return (MediaTransferFlvByJavacv) mt;
}
// An FFmpeg instance already holds this key: stop it before switching.
if (mt instanceof MediaTransferFlvByFFmpeg) {
((MediaTransferFlvByFFmpeg) mt).stopFFmpeg();
cameras.remove(cameraDto.getMediaKey());
}
// Start the JavaCV transfer on a worker thread.
if (beforeStart != null) beforeStart.accept(cameraDto);
MediaTransferFlvByJavacv mediaConvert = new MediaTransferFlvByJavacv(cameraDto);
cameras.put(cameraDto.getMediaKey(), mediaConvert);
ThreadUtil.execute(mediaConvert);
return mediaConvert;
}
/** Force-stops and removes the transfer registered under mediaKey (handles both transfer implementations); no-op when absent. */
public void stopByMediaKey(String mediaKey) {
MediaTransfer mt = cameras.get(mediaKey);
if (mt instanceof MediaTransferFlvByJavacv) {
((MediaTransferFlvByJavacv) mt).setRunning(false);
} else if (mt instanceof MediaTransferFlvByFFmpeg) {
((MediaTransferFlvByFFmpeg) mt).stopFFmpeg();
}
cameras.remove(mediaKey);
}
}

View File

@@ -0,0 +1,126 @@
package com.ruoyi.video.service.impl;
import com.ruoyi.common.config.RuoYiConfig;
import com.ruoyi.common.utils.file.FileUploadUtils;
import com.ruoyi.video.domain.Device;
import com.ruoyi.video.service.IDeviceService;
import com.ruoyi.video.service.ImageStoreService;
import com.ruoyi.video.service.MediaService;
import com.ruoyi.video.thread.MediaTransferFlvByJavacv;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.opencv.opencv_core.Mat;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imencode;
/**
 * Grabs the latest annotated frame from a running stream, encodes it as JPEG,
 * wraps the bytes in a MultipartFile, and stores it via FileUploadUtils.upload.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class FileImageStoreServiceImpl implements ImageStoreService {
    private final IDeviceService deviceService;
    private final MediaService mediaService;

    /**
     * Reads the latest annotated frame of the given device and persists it.
     *
     * @param deviceId device ID
     * @return stored path, e.g. /profile/snapshots/device-x/20250927/xxx.jpg
     * @throws IllegalArgumentException when the device does not exist
     * @throws IllegalStateException    when no JavaCV stream or frame is available
     */
    @Override
    public String saveLastAnnotatedFrame(Long deviceId) {
        // 1) Locate the device and its JavaCV transfer instance.
        Device device = deviceService.selectDeviceByDeviceId(deviceId);
        if (device == null) throw new IllegalArgumentException("device not found: " + deviceId);
        MediaTransferFlvByJavacv mt = mediaService.getJavacv(device.getMediaKey());
        if (mt == null) {
            throw new IllegalStateException("media (javacv) not running for mediaKey=" + device.getMediaKey());
        }
        // 2) Fetch the most recent frame with detection boxes already drawn on it.
        Mat mat = mt.getLatestAnnotatedFrameCopy();
        if (mat == null || mat.empty()) {
            throw new IllegalStateException("no annotated frame available currently.");
        }
        // Declared outside the try so the finally block can release its native memory
        // (the original leaked this buffer on every call).
        BytePointer buf = new BytePointer();
        try {
            // 3) Encode the frame to JPEG bytes.
            if (!imencode(".jpg", mat, buf) || buf.isNull() || buf.limit() <= 0) {
                throw new IllegalStateException("encode jpeg failed.");
            }
            byte[] bytes = new byte[(int) buf.limit()];
            buf.get(bytes);
            // 4) Target directory: {profile}/snapshots/device-{deviceId}/
            String profile = RuoYiConfig.getProfile();
            String uploadBaseDir = Paths.get(profile, "snapshots", "device-" + deviceId).toString();
            // 5) File name for the MultipartFile; FileUploadUtils applies its own
            //    naming and date bucketing on top of this.
            String fileName = buildFileName(deviceId);
            // 6) Wrap the bytes as a MultipartFile and store via the RuoYi utility.
            MultipartFile multipart = new InMemoryMultipartFile(
                    "file",
                    fileName,
                    "image/jpeg",
                    bytes
            );
            String stored = FileUploadUtils.upload(uploadBaseDir, multipart);
            log.info("snapshot saved by FileUploadUtils: {}", stored);
            return stored;
        } catch (Exception e) {
            log.error("saveLastAnnotatedFrame failed: {}", e.getMessage(), e);
            throw new RuntimeException("save snapshot failed", e);
        } finally {
            // Release native memory: both the frame copy and the JPEG encode buffer.
            try { mat.release(); } catch (Exception ignore) {}
            try { buf.deallocate(); } catch (Exception ignore) {}
        }
    }

    /** Builds a timestamped file name such as cam7_20250927_153000_123.jpg. */
    private String buildFileName(Long deviceId) {
        ZonedDateTime now = ZonedDateTime.now(ZoneId.systemDefault());
        String ts = now.format(DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss_SSS"));
        return "cam" + deviceId + "_" + ts + ".jpg";
    }

    /**
     * Lightweight in-memory MultipartFile implementation; avoids a dependency
     * on spring-test's MockMultipartFile.
     */
    static class InMemoryMultipartFile implements MultipartFile {
        private final String name;
        private final String originalFilename;
        private final String contentType;
        private final byte[] content;

        InMemoryMultipartFile(String name, String originalFilename, String contentType, byte[] content) {
            this.name = name;
            this.originalFilename = originalFilename;
            this.contentType = contentType;
            // Defensive: never hold a null payload.
            this.content = content != null ? content : new byte[0];
        }
        @Override public String getName() { return name; }
        @Override public String getOriginalFilename() { return originalFilename; }
        @Override public String getContentType() { return contentType; }
        @Override public boolean isEmpty() { return content.length == 0; }
        @Override public long getSize() { return content.length; }
        @Override public byte[] getBytes() { return content; }
        @Override public InputStream getInputStream() { return new ByteArrayInputStream(content); }
        @Override public void transferTo(java.io.File dest) throws IOException {
            try (var in = getInputStream(); var out = new java.io.FileOutputStream(dest)) {
                in.transferTo(out);
            }
        }
    }
}

View File

@@ -181,7 +181,7 @@ public class InspectionTaskServiceImpl implements InspectionTaskService {
// 初始化模型管理器
if (modelManager == null) {
modelManager = new ModelManager();
URL json = getClass().getResource("/models/models.json");
URL json = getClass().getResource("/libs/models/models.json");
if (json != null) {
modelManager.load(json);
}

View File

@@ -1,51 +1,81 @@
package com.ruoyi.video.thread;
import com.ruoyi.video.common.ClientType;
import com.ruoyi.video.common.ModelManager; // ★ 新增:多模型管理(见前面提供的类)
import com.ruoyi.video.domain.Detection; // ★ 新增:检测结果(见前面提供的类)
import com.ruoyi.video.common.ModelManager;
import com.ruoyi.video.domain.Detection;
import com.ruoyi.video.domain.dto.CameraDto;
import com.ruoyi.video.service.MediaService;
import com.ruoyi.video.thread.detector.CompositeDetector; // ★ 新增:并行多模型
import com.ruoyi.video.thread.detector.YoloDetector; // ★ 新增:检测接口
import com.ruoyi.video.utils.Overlay; // ★ 新增:画框工具
import com.ruoyi.video.thread.detector.CompositeDetector;
import com.ruoyi.video.thread.detector.YoloDetector;
import com.ruoyi.video.utils.Overlay;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.Mat;
import org.springframework.scheduling.annotation.Async;
import org.springframework.util.CollectionUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
import java.util.Collections;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
/**
* @Author: orange
* @CreateTime: 2025-01-16
* 推流(FLV) + JavaCV 解码/转码 + (可选)YOLO 检测叠框
* - 支持“窗口巡检”:在给定秒数内启用推理与统计,并通过 DetectionListener 回调让上层落库/告警
* - 播放开始可触发 10 秒试跑attachDetectionListener(jobId, deviceId, 10, listener)
*
* 依赖ModelManager / YoloDetector / CompositeDetector / Detection / Overlay / MediaService / CameraDto / ClientType
*
* @author orange
* @since 2025-01-16
*/
@Slf4j
public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable {
/* ===================== 内部回调/统计类型(如已外部定义,可移除) ===================== */
/** Callback interface for detection results produced during an inspection window. */
public interface DetectionListener {
/** Called each time an inference pass yields detections (callers are advised to throttle). */
void onDetections(Long jobId, Long deviceId, List<Detection> detections, long frameTsMs);
/** Called once when an inspection window ends, carrying the window statistics. */
void onWindowFinished(Long jobId, Long deviceId, WindowStats stats);
}
/** Aggregated statistics for one inspection window (reset in attachDetectionListener, closed in stopWindowIfAny). */
@Data
public static class WindowStats {
// frames processed during the window — presumably incremented per processed frame; TODO confirm in updateStats
private int frames;
// frames that yielded detections — NOTE(review): exact semantics live in updateStats, confirm there
private int detectedFrames;
// total detected objects accumulated over the window
private int objects;
// highest detection confidence observed in the window
private double maxScore;
// window start timestamp (epoch millis, set when the window opens)
private long startMs;
// window end timestamp (epoch millis, set when the window closes)
private long endMs;
}
/* ===================== FFmpeg/JavaCV 初始化 ===================== */
static {
avutil.av_log_set_level(avutil.AV_LOG_ERROR);
FFmpegLogCallback.set();
}
/*** ====== 原有字段 ====== ***/
/* ===================== 原有字段 ===================== */
private final ConcurrentHashMap<String, ChannelHandlerContext> wsClients = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, ChannelHandlerContext> httpClients = new ConcurrentHashMap<>();
@@ -61,76 +91,170 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
private FFmpegFrameGrabber grabber; // 拉流器
private FFmpegFrameRecorder recorder; // 推流录制器
/** true:转复用,false:转码 */
private boolean transferFlag = false; // 默认转码
/** true: 转复用false: 转码。启用检测时强制转码(要在像素上叠框) */
private boolean transferFlag = false;
private final CameraDto cameraDto;
private Thread listenThread;
/*** ====== 新增:推理相关字段 ====== ***/
// 开关:是否启用检测(可对外提供 setter
/* ===================== 推理相关字段 ===================== */
// 外部开关:是否启用检测(默认启用;也可由任务/页面配置动态设置)
private boolean enableDetection = true;
// 模型与推理
private ModelManager modelManager;
private YoloDetector detector;
// 三线程解耦所需
// 解码/推理/发送解耦
private final OpenCVFrameConverter.ToMat toMat = new OpenCVFrameConverter.ToMat();
private final OpenCVFrameConverter.ToMat matToFrameConverter = new OpenCVFrameConverter.ToMat();
private final AtomicReference<Mat> latestFrame = new AtomicReference<>();
private final AtomicReference<List<Detection>> latestDetections =
new AtomicReference<>(java.util.Collections.emptyList());
// 窗口巡检控制
private volatile boolean windowMode = false;
private volatile long windowEndMs = 0L;
private Long currentJobId;
private Long currentDeviceId;
private DetectionListener detectionListener;
private final WindowStats stats = new WindowStats();
// 导出最近一次“叠好框的帧”用于截图存证
private final AtomicReference<Mat> latestAnnotatedFrame = new AtomicReference<>();
public MediaTransferFlvByJavacv(CameraDto cameraDto) {
super();
this.cameraDto = cameraDto;
}
public void setRunning(boolean running) {
boolean prev = this.running;
this.running = running;
// 如果是从 true -> false则按“关闭”处理
if (prev && !running) {
try {
closeMedia(); // 内部会 stopWindowIfAny()、关闭连接等
} catch (Exception ignore) {}
}
}
/** 推荐的新接口:显式停止并释放资源 */
public void stop() {
setRunning(false);
}
/* ===================== 外部控制 API ===================== */
public boolean isRunning() { return running; }
public void setRunning(boolean running) { this.running = running; }
public boolean isGrabberStatus() { return grabberStatus; }
public void setGrabberStatus(boolean grabberStatus) { this.grabberStatus = grabberStatus; }
public boolean isRecorderStatus() { return recorderStatus; }
public void setRecorderStatus(boolean recorderStatus) { this.recorderStatus = recorderStatus; }
public void setEnableDetection(boolean enable) { this.enableDetection = enable; }
/*** ====== 推理初始化 ====== ***/
/**
 * Starts an "inspection window" lasting windowSeconds seconds; during the window
 * every inference pass invokes onDetections, and onWindowFinished fires at the end.
 * A non-positive duration or null listener is ignored. Counters are reset here.
 */
public void attachDetectionListener(Long jobId, Long deviceId, int windowSeconds, DetectionListener listener) {
if (windowSeconds <= 0 || listener == null) return;
this.currentJobId = jobId;
this.currentDeviceId = deviceId;
this.detectionListener = listener;
this.windowMode = true;
long now = System.currentTimeMillis();
this.stats.setStartMs(now);
this.windowEndMs = now + windowSeconds * 1000L;
this.stats.setFrames(0);
this.stats.setDetectedFrames(0);
this.stats.setObjects(0);
this.stats.setMaxScore(0.0);
log.info("[job:{} device:{}] window started {}s", jobId, deviceId, windowSeconds);
}
/** Ends the current window early (e.g. when the task is interrupted); a no-op when no window is active. Fires onWindowFinished with a snapshot of the stats, then clears the window context. */
public void stopWindowIfAny() {
if (!windowMode) return;
this.windowMode = false;
stats.setEndMs(System.currentTimeMillis());
if (detectionListener != null && currentJobId != null && currentDeviceId != null) {
try {
// Hand the listener a copy so later resets cannot mutate what it received.
detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats));
} catch (Exception ignore) {}
}
currentJobId = null;
currentDeviceId = null;
detectionListener = null;
log.info("window finished (stopWindowIfAny)");
}
/**
 * Returns a deep copy of the most recent annotated frame for snapshot/evidence
 * use, or null when none is available. The caller is responsible for releasing
 * the returned Mat.
 * NOTE(review): the source Mat may be swapped by the decode loop while being
 * copied — confirm the updateLatestAnnotated path never releases it in-flight.
 */
public Mat getLatestAnnotatedFrameCopy() {
Mat src = latestAnnotatedFrame.get();
if (src == null || src.empty()) return null;
Mat copy = new Mat(src.rows(), src.cols(), src.type());
src.copyTo(copy);
return copy;
}
/* ===================== 初始化推理 ===================== */
private void initDetectors() throws Exception {
if (!enableDetection) return;
modelManager = new ModelManager();
URL json = getClass().getResource("/models/models.json");
modelManager.load(json);
// 单模型: detector = modelManager.get("person-helmet");
// 多模型并行示例并行度按CPU核数/模型大小调整:
// 你可按需切换单模型或多模型并行
// detector = modelManager.get("person-helmet");
detector = new CompositeDetector(
"all-models",
java.util.List.of(modelManager.get("person-helmet"), modelManager.get("vehicle-plate")),
2
java.util.List.of(
modelManager.get("person-helmet"),
modelManager.get("vehicle-plate")
),
2 // 并行度
);
log.info("YOLO detectors ready: {}", detector.name());
// 预热一次,避免前几帧“无框”
try {
Frame warm = grabber != null ? grabber.grabImage() : null;
if (warm != null) {
Mat wm = toMat.convert(warm);
if (wm != null && !wm.empty()) {
long t0 = System.currentTimeMillis();
List<Detection> dets = detector.detect(wm);
long cost = System.currentTimeMillis() - t0;
latestDetections.set(dets);
log.info("Detector warm-up OK, cost={}ms, dets={}", cost,
CollectionUtils.isEmpty(dets) ? 0 : dets.size());
}
}
} catch (Throwable e) {
log.warn("Detector warm-up failed: {}", e.getMessage());
}
}
/*** ====== 拉流器 ====== ***/
/* ===================== 拉流/推流 ===================== */
protected boolean createGrabber() {
grabber = new FFmpegFrameGrabber(cameraDto.getUrl());
// 注意:这些是微秒字符串
String fiveSecUs = "5000000";
String oneMb = "1048576";
grabber.setOption("threads", "1");
grabber.setOption("buffer_size", oneMb);
grabber.setOption("rw_timeout", fiveSecUs);
grabber.setOption("stimeout", fiveSecUs);
grabber.setOption("probesize", "1048576"); // ← 修正probesize 是“字节”
grabber.setOption("stimeout", fiveSecUs);
grabber.setOption("probesize", "1048576");
grabber.setOption("analyzeduration", fiveSecUs);
grabber.setOption("fflags", "nobuffer");
grabber.setOption("flags", "low_delay");
grabber.setOption("loglevel", "error"); // 稳定后压低日志
grabber.setOption("flags", "low_delay");
grabber.setOption("loglevel", "error");
if (cameraDto.getUrl().toLowerCase().startsWith("rtsp://")) {
grabber.setOption("rtsp_transport", "tcp"); // 你要测 UDP 再改
grabber.setOption("rtsp_transport", "tcp");
grabber.setOption("allowed_media_types", "video");
grabber.setOption("max_delay", "500000");
grabber.setOption("user_agent", "Lavf/60");
@@ -147,19 +271,17 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
try {
grabber.start();
log.info("\n{}\n启动拉流器成功", cameraDto.getUrl());
log.info("启动拉流器成功: {}", cameraDto.getUrl());
return (grabberStatus = true);
} catch (FrameGrabber.Exception e) {
MediaService.cameras.remove(cameraDto.getMediaKey());
log.error("\n{}\n启动拉流器失败网络超时或视频源不可用{}",
cameraDto.getUrl(), e.getMessage());
log.error("启动拉流器失败: {} ({})", cameraDto.getUrl(), e.getMessage());
return (grabberStatus = false);
}
}
/*** ====== 录制器(转码/转复用) ====== ***/
protected boolean createTransterOrRecodeRecorder() {
// 启用检测必须转码(因为需要像素上画框)
// 启用检测必须转码(需要像素级叠框)
if (enableDetection) transferFlag = false;
recorder = new FFmpegFrameRecorder(bos, grabber.getImageWidth(), grabber.getImageHeight(),
@@ -167,7 +289,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
recorder.setFormat("flv");
if (!transferFlag) {
// 转码低延迟 H.264
// 转码低延迟 H.264
recorder.setInterleaved(false);
recorder.setVideoOption("tune", "zerolatency");
recorder.setVideoOption("preset", "ultrafast");
@@ -188,20 +310,20 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
MediaService.cameras.remove(cameraDto.getMediaKey());
}
} else {
// 转复用(不画框时可用
// 转复用(仅不叠框时
recorder.setCloseOutputStream(false);
try {
recorder.start(grabber.getFormatContext());
return recorderStatus = true;
} catch (FrameRecorder.Exception e) {
log.warn("\r\n{}\r\n启动转复用录制器失败,自动切换转码", cameraDto.getUrl());
log.warn("{} 启动转复用失败,自动切换转码", cameraDto.getUrl());
transferFlag = false;
try { recorder.stop(); } catch (FrameRecorder.Exception ignored) {}
if (createTransterOrRecodeRecorder()) {
log.error("\r\n{}\r\n切换到转码模式", cameraDto.getUrl());
log.error("{} 切换到转码模式", cameraDto.getUrl());
return true;
}
log.error("\r\n{}\r\n切换转码模式失败", cameraDto.getUrl(), e);
log.error("{} 切换转码模式失败", cameraDto.getUrl(), e);
}
}
return recorderStatus = false;
@@ -216,23 +338,20 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
&& (avcodec.AV_CODEC_ID_AAC == acodec || avcodec.AV_CODEC_ID_AAC_LATM == acodec);
}
/*** ====== 主流程:转换为 FLV 并输出 ====== ***/
/* ===================== 主流程 ===================== */
protected void transferStream2Flv() {
try {
if (enableDetection) initDetectors();
} catch (Exception e) {
log.error("初始化检测模型失败:{}", e.getMessage(), e);
// 模型失败也不中断推流,只是不画框
enableDetection = false;
}
if (!createGrabber()) return;
// 如果未启用检测,且编解码本身支持 FLV可以转复用提升性能
if (!enableDetection) transferFlag = supportFlvFormatCodec();
if (!createTransterOrRecodeRecorder()) return;
try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.info("清空拉流器缓存失败", e); }
try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.debug("flush grabber fail", e); }
if (header == null) {
header = bos.toByteArray();
bos.reset();
@@ -242,77 +361,21 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
listenClient();
long startTime = 0;
long videoTS = 0;
// === 若启用检测,启动“解码→推理→渲染”解耦线程 ===
Thread tDecode = null, tInfer = null;
if (enableDetection) {
// 解码线程:仅更新 latestFrame覆盖式不阻塞
tDecode = new Thread(() -> {
while (running && grabberStatus) {
try {
Frame f = grabber.grabImage();
if (f == null) continue;
Mat m = toMat.convert(f);
if (m == null || m.empty()) continue;
Mat copy = new Mat(m.rows(), m.cols(), CV_8UC3);
m.copyTo(copy);
Mat old = latestFrame.getAndSet(copy);
if (old != null) old.release();
} catch (Exception e) {
log.debug("decode err: {}", e.getMessage());
}
}
}, "det-decode");
// 推理线程:限速(默认 15 FPS更新 latestDetections
int inferFps = 15;
long period = 1_000_000_000L / inferFps;
tInfer = new Thread(() -> {
long next = System.nanoTime();
while (running && grabberStatus) {
long now = System.nanoTime();
if (now < next) { LockSupport.parkNanos(next - now); continue; }
next += period;
Mat src = latestFrame.get();
if (src == null || src.empty()) continue;
Mat snap = new Mat(); src.copyTo(snap);
try {
List<Detection> dets = detector.detect(snap);
latestDetections.set(dets);
} catch (Throwable e) {
log.debug("infer err: {}", e.getMessage());
} finally {
snap.release();
}
}
}, "det-infer");
// 抢先预热一次,避免前几帧无框
try {
Frame warm = grabber.grabImage();
if (warm != null) {
Mat wm = toMat.convert(warm);
if (wm != null && !wm.empty() && detector != null) {
latestDetections.set(detector.detect(wm));
}
}
} catch (Exception ignored) { }
tDecode.start();
tInfer.start();
}
// === 主发送循环(转复用/转码两种路径) ===
long videoTS;
// 检测频率控制变量
final long DETECTION_INTERVAL_MS = 3000; // 每3秒检测一次
long lastDetectionTime = 0;
List<Detection> currentDetections = Collections.emptyList(); // 当前显示的检测结果
for (; running && grabberStatus && recorderStatus; ) {
try {
if (transferFlag) {
// ---- 转复用(不画框)----
// 仅转复用(未叠框)
long startGrab = System.currentTimeMillis();
AVPacket pkt = grabber.grabPacket();
if ((System.currentTimeMillis() - startGrab) > 5000) {
log.info("\r\n{}\r\n视频流网络异常>>>", cameraDto.getUrl());
log.info("{} 网络异常(复用)", cameraDto.getUrl());
closeMedia();
break;
}
@@ -323,66 +386,227 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
recorder.recordPacket(pkt);
}
} else {
// ---- 转码(可框)----
// 转码(可框)
long startGrab = System.currentTimeMillis();
Frame frame;
if (enableDetection) {
// 如果启用检测,解码线程已在跑;这里直接从 latestFrame 取,减少重复解码
Mat src = latestFrame.get();
if (src == null || src.empty()) continue;
// 叠加最近一次检测结果
Overlay.draw(latestDetections.get(), src);
frame = toMat.convert(src);
} else {
// 未开启检测:直接 grab 并转码
frame = grabber.grab();
}
Frame frame = grabber.grab();
if ((System.currentTimeMillis() - startGrab) > 5000) {
log.info("\r\n{}\r\n视频流网络异常>>>", cameraDto.getUrl());
log.info("{} 网络异常(转码)", cameraDto.getUrl());
closeMedia();
break;
}
if (frame != null && enableDetection) {
// 将Frame转换为Mat以进行处理
Mat mat = toMat.convert(frame);
if (mat != null && !mat.empty()) {
long currentTime = System.currentTimeMillis();
// 每隔DETECTION_INTERVAL_MS执行一次检测
if (currentTime - lastDetectionTime >= DETECTION_INTERVAL_MS) {
try {
log.debug("执行新一轮检测,上次检测时间: {}ms前",
currentTime - lastDetectionTime);
// 创建副本进行检测
Mat detectionMat = new Mat();
mat.copyTo(detectionMat);
// 执行检测
currentDetections = detector.detect(detectionMat);
lastDetectionTime = currentTime;
latestDetections.set(currentDetections);
// 释放检测Mat
detectionMat.release();
// 窗口巡检回调
if (windowMode && detectionListener != null &&
currentJobId != null && currentDeviceId != null) {
detectionListener.onDetections(currentJobId,
currentDeviceId,
currentDetections,
currentTime);
}
log.debug("检测完成,发现 {} 个目标框将保持3秒",
currentDetections == null ? 0 : currentDetections.size());
} catch (Exception e) {
log.debug("检测异常: {}", e.getMessage());
}
}
// 每一帧都使用最新的检测结果绘制框
// 这样框会保持在原位置,直到下一次检测更新
if (currentDetections != null && !currentDetections.isEmpty()) {
try {
// 在当前帧上绘制检测框
Overlay.draw(currentDetections, mat);
} catch (Exception e) {
log.debug("绘制检测框异常: {}", e.getMessage());
}
}
// 更新"最近叠好框的帧"用于存证
updateLatestAnnotated(mat);
// 统计(仅窗口巡检时)
if (windowMode) updateStats(currentDetections);
// 窗口结束判定
if (windowMode && System.currentTimeMillis() >= windowEndMs) {
finishWindow();
}
// 将处理后的Mat转换回Frame
try {
// 创建新的转换器
OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
Frame processedFrame = converter.convert(mat);
if (processedFrame != null) {
// 使用处理后的帧替换原始帧
frame = processedFrame;
}
} catch (Exception e) {
log.debug("Mat转Frame异常: {}", e.getMessage());
// 如果转换失败,继续使用原始帧
}
// 释放Mat
mat.release();
}
}
// 记录帧
if (frame != null) {
if (startTime == 0) startTime = System.currentTimeMillis();
videoTS = 1000 * (System.currentTimeMillis() - startTime);
long now = System.currentTimeMillis();
if (startTime == 0) startTime = now;
videoTS = 1000 * (now - startTime);
if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
recorder.record(frame);
}
}
} catch (FrameGrabber.Exception e) {
log.error("拉流异常: {}", e.getMessage());
grabberStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
} catch (FrameRecorder.Exception e) {
log.error("推流异常: {}", e.getMessage());
recorderStatus = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
} catch (Exception e) {
log.error("其他异常: {}", e.getMessage());
// 不要立即退出,尝试继续处理
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
}
if (bos.size() > 0) {
byte[] b = bos.toByteArray();
bos.reset();
sendFrameData(b);
// 输出缓存到客户端
try {
if (bos.size() > 0) {
byte[] b = bos.toByteArray();
bos.reset();
sendFrameData(b);
}
} catch (Exception e) {
log.error("发送数据异常: {}", e.getMessage());
}
}
// === 收尾 ===
try {
if (detector != null) try { detector.close(); } catch (Exception ignored) {}
if (modelManager != null) try { modelManager.close(); } catch (Exception ignored) {}
if (recorder != null) recorder.close();
if (grabber != null) grabber.close();
bos.close();
} catch (Exception ignored) {
} finally {
Mat m = latestFrame.getAndSet(null);
if (m != null) m.release();
closeMedia();
}
log.info("关闭媒体流-javacv{} ", cameraDto.getUrl());
// 安全地关闭资源
safeCloseResources();
}
/*** ====== 网络发送(原样保留) ====== ***/
// 将资源关闭逻辑提取到单独的方法
/**
 * Best-effort teardown of every resource this transfer owns: the detector,
 * model manager, recorder, grabber, output buffer and both cached frames.
 * Each resource is closed independently so one failure never prevents the
 * release of the others; the method is safe to call with any subset null.
 */
private void safeCloseResources() {
    try {
        try { if (detector != null) detector.close(); } catch (Exception ignored) { }
        try { if (modelManager != null) modelManager.close(); } catch (Exception ignored) { }
        try { if (recorder != null) recorder.close(); } catch (Exception ignored) { }
        try { if (grabber != null) grabber.close(); } catch (Exception ignored) { }
        try { bos.close(); } catch (Exception ignored) { }
        // Swap out the cached raw frame and free its native memory.
        Mat pending = latestFrame.getAndSet(null);
        if (pending != null) {
            try { pending.release(); } catch (Exception ignored) { }
        }
        // Same for the last annotated ("evidence") frame.
        Mat annotated = latestAnnotatedFrame.getAndSet(null);
        if (annotated != null) {
            try { annotated.release(); } catch (Exception ignored) { }
        }
        closeMedia();
    } catch (Exception e) {
        log.error("关闭资源异常: {}", e.getMessage());
    }
    log.info("关闭媒体流-javacv: {}", cameraDto.getUrl());
}
/* ===================== 统计 / 窗口结束 ===================== */
/**
 * Accumulates per-frame detection statistics into {@code stats}.
 * Every call counts one frame; frames that carry detections additionally
 * bump the detected-frame counter, the total object count, and — when a
 * detection in this frame beats the running maximum — the max score.
 */
private void updateStats(List<Detection> dets) {
    stats.setFrames(stats.getFrames() + 1);
    if (dets == null || dets.isEmpty()) {
        return; // nothing detected this frame; only the frame counter moves
    }
    stats.setDetectedFrames(stats.getDetectedFrames() + 1);
    stats.setObjects(stats.getObjects() + dets.size());
    // Track the highest confidence seen in this frame.
    double best = 0.0;
    for (Detection d : dets) {
        if (d.conf() > best) {
            best = d.conf();
        }
    }
    if (best > stats.getMaxScore()) {
        stats.setMaxScore(best);
    }
}
/**
 * Ends the current inspection window: stamps the end time, notifies the
 * listener with a defensive copy of the stats (listener failures are
 * ignored), then clears all window-scoped state so a new window can start.
 */
private void finishWindow() {
    windowMode = false;
    stats.setEndMs(System.currentTimeMillis());
    boolean canNotify = detectionListener != null
            && currentJobId != null
            && currentDeviceId != null;
    if (canNotify) {
        try {
            // Hand the listener a snapshot so later mutation of stats is invisible to it.
            detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats));
        } catch (Exception ignore) {
        }
    }
    // Reset window-scoped state.
    currentJobId = null;
    currentDeviceId = null;
    detectionListener = null;
    log.info("window finished (timeout)");
}
/**
 * Returns a field-by-field copy of the given window statistics so callers
 * can hand the copy out (e.g. to listeners) without exposing the mutable
 * original.
 */
private static WindowStats cloneStats(WindowStats s) {
    WindowStats snapshot = new WindowStats();
    snapshot.setStartMs(s.getStartMs());
    snapshot.setEndMs(s.getEndMs());
    snapshot.setFrames(s.getFrames());
    snapshot.setDetectedFrames(s.getDetectedFrames());
    snapshot.setObjects(s.getObjects());
    snapshot.setMaxScore(s.getMaxScore());
    return snapshot;
}
/**
 * Stores a deep copy of {@code src} as the most recent annotated frame
 * (used for evidence capture), releasing whatever frame it replaces so
 * native memory is not leaked. Null/empty inputs are ignored.
 */
private void updateLatestAnnotated(Mat src) {
    if (src == null || src.empty()) {
        return;
    }
    // Deep-copy: the caller releases src right after this call.
    Mat snapshot = new Mat(src.rows(), src.cols(), src.type());
    src.copyTo(snapshot);
    Mat previous = latestAnnotatedFrame.getAndSet(snapshot);
    if (previous != null) {
        previous.release();
    }
}
/* ===================== 网络发送/连接管理 ===================== */
private void sendFrameData(byte[] data) {
// ws
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
@@ -396,7 +620,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
} catch (Exception e) {
wsClients.remove(entry.getKey());
hasClient();
e.printStackTrace();
log.debug("ws send err", e);
}
}
// http
@@ -411,7 +635,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
} catch (Exception e) {
httpClients.remove(entry.getKey());
hasClient();
e.printStackTrace();
log.debug("http send err", e);
}
}
}
@@ -422,7 +646,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
if (hcSize != newHcSize || wcSize != newWcSize) {
hcSize = newHcSize;
wcSize = newWcSize;
log.info("\r\n{}\r\nhttp连接数{}, ws连接数{} \r\n", cameraDto.getUrl(), newHcSize, newWcSize);
log.info("{} http连接数{}, ws连接数{}", cameraDto.getUrl(), newHcSize, newWcSize);
}
if (!cameraDto.isAutoClose()) return;
@@ -448,15 +672,16 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
}
private void closeMedia() {
// 结束窗口(如果还在)
stopWindowIfAny();
running = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
try { entry.getValue().close(); } catch (Exception ignored) {}
finally { wsClients.remove(entry.getKey()); }
try { entry.getValue().close(); } catch (Exception ignored) {} finally { wsClients.remove(entry.getKey()); }
}
for (Map.Entry<String, ChannelHandlerContext> entry : httpClients.entrySet()) {
try { entry.getValue().close(); } catch (Exception ignored) {}
finally { httpClients.remove(entry.getKey()); }
try { entry.getValue().close(); } catch (Exception ignored) {} finally { httpClients.remove(entry.getKey()); }
}
}
@@ -491,7 +716,7 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
}
}
} catch (Exception e) {
e.printStackTrace();
log.debug("send header err", e);
}
break;
}
@@ -499,10 +724,13 @@ public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable
timeout += 50;
if (timeout > 30000) break;
} catch (Exception e) {
e.printStackTrace();
log.debug("addClient err", e);
}
}
}
@Override public void run() { transferStream2Flv(); }
@Override
public void run() {
transferStream2Flv();
}
}

View File

@@ -0,0 +1,29 @@
package com.ruoyi.video.thread.detector;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.opencv.opencv_core.*;
import java.util.*;
/**
 * No-op fallback detector used when a real model fails to load.
 * <p>
 * It reports the name of the model it stands in for, and {@link #detect}
 * always returns an empty list so the pipeline keeps running without
 * drawing any boxes.
 */
public class DummyDetector implements YoloDetector {

    // Name of the model this dummy replaces; echoed back by name().
    private final String name;

    /**
     * @param name name of the model that could not be loaded
     */
    public DummyDetector(String name) {
        this.name = name;
        // Diagnostics go to stderr so they are not mixed into normal stdout output.
        System.err.println("警告: 使用DummyDetector替代真实模型 - " + name);
    }

    @Override
    public String name() {
        return name;
    }

    /**
     * Performs no detection.
     *
     * @param bgr ignored
     * @return an immutable empty list
     */
    @Override
    public List<Detection> detect(Mat bgr) {
        // 返回空列表,不进行实际检测
        return Collections.emptyList();
    }
}

View File

@@ -0,0 +1,249 @@
package com.ruoyi.video.thread.detector;
import com.ruoyi.video.domain.Detection;
import org.bytedeco.javacpp.indexer.FloatRawIndexer;
import org.bytedeco.opencv.opencv_core.*;
import org.bytedeco.opencv.opencv_dnn.Net;
import java.nio.file.*;
import java.util.*;
import static org.bytedeco.opencv.global.opencv_dnn.*;
import static org.bytedeco.opencv.global.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
 * YOLO-style object detector backed by OpenCV's DNN module loading an ONNX model.
 * <p>
 * The model directory must contain exactly one {@code .onnx} file and may contain
 * a {@code classes.txt} (one class name per line). Inference runs on the OpenCV
 * CPU backend. {@link #detect} is tolerant: any per-frame failure yields an empty
 * result instead of propagating.
 */
public final class OnnxYoloDetector implements YoloDetector {

    // Human-readable model name, prefixed onto every detection label.
    private final String modelName;
    // Loaded OpenCV DNN network (not thread-safe; callers serialize access).
    private final Net net;
    // Network input size (width x height) used by blobFromImage.
    private final Size input;
    // Confidence threshold and NMS IoU threshold.
    private final float confTh = 0.25f, nmsTh = 0.45f;
    // Class names from classes.txt; empty array when the file is absent.
    private final String[] classes;
    // Box color (packed BGR) attached to each Detection.
    private final int colorBGR;

    /**
     * Loads an ONNX model from {@code dir}.
     *
     * @param name     model name used in labels and logs
     * @param dir      directory containing the .onnx file and optional classes.txt
     * @param inW      network input width
     * @param inH      network input height
     * @param backend  currently unused — the OpenCV CPU backend is always selected
     * @param colorBGR packed BGR color for drawn boxes
     * @throws Exception when no .onnx file is found or the model fails to load
     */
    public OnnxYoloDetector(String name, Path dir, int inW, int inH, String backend, int colorBGR) throws Exception {
        this.modelName = name;
        this.input = new Size(inW, inH);
        this.colorBGR = colorBGR;
        // 查找ONNX模型文件
        String onnx = findModelFile(dir, ".onnx");
        if (onnx == null) {
            throw new Exception("找不到ONNX模型文件请确保目录中存在 .onnx 文件: " + dir);
        }
        // 读取类别文件
        Path clsPath = dir.resolve("classes.txt");
        if (Files.exists(clsPath)) {
            this.classes = Files.readAllLines(clsPath).stream().map(String::trim)
                    .filter(s -> !s.isEmpty()).toArray(String[]::new);
        } else {
            this.classes = new String[0];
        }
        try {
            // 加载ONNX模型
            this.net = readNetFromONNX(onnx);
            // Force the plain OpenCV backend on CPU (no OpenVINO/CUDA dependency).
            net.setPreferableBackend(DNN_BACKEND_OPENCV);
            net.setPreferableTarget(DNN_TARGET_CPU);
            System.out.println("ONNX模型加载成功: " + name + " (" + onnx + ")");
        } catch (Exception e) {
            throw new Exception("模型加载失败: " + e.getMessage() +
                    "\n请确保ONNX模型文件格式正确", e);
        }
    }

    /**
     * Finds the first file in {@code dir} whose name ends with {@code extension}
     * (case-insensitive), or returns null when none exists or listing fails.
     */
    private String findModelFile(Path dir, String extension) {
        // try-with-resources: Files.list returns a Stream backed by an open
        // directory handle that must be closed, otherwise the handle leaks.
        try (java.util.stream.Stream<Path> entries = Files.list(dir)) {
            return entries
                    .filter(path -> path.toString().toLowerCase().endsWith(extension.toLowerCase()))
                    .map(Path::toString)
                    .findFirst()
                    .orElse(null);
        } catch (Exception e) {
            return null;
        }
    }

    @Override public String name() { return modelName; }

    /**
     * Runs one inference pass over a BGR frame.
     *
     * @param bgr input frame; 1- and 4-channel inputs are converted to BGR
     * @return detections after confidence filtering and NMS; empty on any failure
     */
    @Override
    public List<Detection> detect(Mat bgr) {
        if (bgr == null || bgr.empty()) return Collections.emptyList();
        // Normalize to a 3-channel BGR Mat so blobFromImage's assertions hold.
        if (bgr.channels() != 3) {
            Mat tmp = new Mat();
            if (bgr.channels() == 1) cvtColor(bgr, tmp, COLOR_GRAY2BGR);
            else if (bgr.channels() == 4) cvtColor(bgr, tmp, COLOR_BGRA2BGR);
            else bgr.copyTo(tmp);
            bgr = tmp;
        }
        try (Mat blob = blobFromImage(bgr, 1.0/255.0, input, new Scalar(0.0), true, false, CV_32F)) {
            net.setInput(blob);
            // ===== Multi-output compatibility (correct Bytedeco usage) =====
            org.bytedeco.opencv.opencv_core.StringVector outNames = net.getUnconnectedOutLayersNames();
            List<Mat> outs = new ArrayList<>();
            if (outNames == null || outNames.size() == 0) {
                // Single default output: forward() returns the Mat directly.
                Mat out = net.forward();
                outs.add(out);
            } else {
                // Multiple outputs: collect them into a MatVector.
                org.bytedeco.opencv.opencv_core.MatVector outBlobs =
                        new org.bytedeco.opencv.opencv_core.MatVector(outNames.size());
                net.forward(outBlobs, outNames);
                for (long i = 0; i < outBlobs.size(); i++) {
                    outs.add(outBlobs.get(i));
                }
            }
            int fw = bgr.cols(), fh = bgr.rows();
            List<Rect2d> boxes = new ArrayList<>();
            List<Float> scores = new ArrayList<>();
            List<Integer> classIds = new ArrayList<>();
            for (Mat out : outs) {
                parseYoloOutput(out, fw, fh, boxes, scores, classIds);
            }
            if (boxes.isEmpty()) return Collections.emptyList();
            // Pure-Java NMS: avoids MatOf* / Vector API compatibility issues.
            List<Integer> keep = nmsIndices(boxes, scores, nmsTh);
            List<Detection> result = new ArrayList<>(keep.size());
            for (int k : keep) {
                Rect2d r = boxes.get(k);
                Rect rect = new Rect((int)r.x(), (int)r.y(), (int)r.width(), (int)r.height());
                int cid = classIds.get(k);
                String cname = (cid >= 0 && cid < classes.length) ? classes[cid] : ("cls"+cid);
                result.add(new Detection("["+modelName+"] "+cname, scores.get(k), rect, colorBGR));
            }
            return result;
        } catch (Throwable e) {
            // A single failed frame must not take down the whole stream.
            return Collections.emptyList();
        }
    }

    /**
     * Parses a YOLO output tensor into N×C rows (C &gt;= 6: cx,cy,w,h,obj,cls...)
     * and appends decoded boxes/scores/class ids. Supports 2-D, 3-D ([1,N,C] or
     * [1,C,N]) and 4-D ([1,1,N,C] or [1,1,C,N]) layouts; other shapes are ignored.
     * Coordinates are assumed to be normalized center-format (cx,cy,w,h) —
     * adjust the conversion below if your export uses x1,y1,x2,y2.
     */
    private void parseYoloOutput(Mat out, int fw, int fh,
                                 List<Rect2d> boxes, List<Float> scores, List<Integer> classIds) {
        int dims = out.dims();
        Mat m;
        if (dims == 2) {
            // NxC or CxN
            if (out.cols() >= 6) {
                m = out;
            } else {
                Mat tmp = new Mat();
                transpose(out, tmp); // CxN -> NxC
                m = tmp;
            }
        } else if (dims == 3) {
            // [1,N,C] or [1,C,N]
            if (out.size(2) >= 6) {
                m = out.reshape(1, out.size(1)); // -> N×C
            } else {
                Mat squeezed = out.reshape(1, out.size(1)); // C×N
                Mat tmp = new Mat();
                transpose(squeezed, tmp); // -> N×C
                m = tmp;
            }
        } else if (dims == 4) {
            // [1,1,N,C] or [1,1,C,N]
            int a = out.size(2), b = out.size(3);
            if (b >= 6) {
                m = out.reshape(1, a).clone(); // -> N×C
            } else {
                Mat cxn = out.reshape(1, b); // C×N
                Mat tmp = new Mat();
                transpose(cxn, tmp); // -> N×C
                m = tmp.clone();
            }
        } else {
            return; // unsupported tensor shape
        }
        int N = m.rows(), C = m.cols();
        if (C < 6 || N <= 0) return;
        FloatRawIndexer idx = m.createIndexer();
        for (int i = 0; i < N; i++) {
            float cx = idx.get(i,0), cy = idx.get(i,1), w = idx.get(i,2), h = idx.get(i,3);
            float obj = idx.get(i,4);
            // Pick the best class score among columns 5..C-1.
            int bestCls = -1; float bestScore = 0f;
            for (int c = 5; c < C; c++) {
                float p = idx.get(i,c);
                if (p > bestScore) { bestScore = p; bestCls = c - 5; }
            }
            float conf = obj * bestScore;
            if (conf < confTh) continue;
            // Convert normalized center-format box to clamped pixel coordinates.
            int bx = Math.max(0, Math.round(cx * fw - (w * fw) / 2f));
            int by = Math.max(0, Math.round(cy * fh - (h * fh) / 2f));
            int bw = Math.min(fw - bx, Math.round(w * fw));
            int bh = Math.min(fh - by, Math.round(h * fh));
            if (bw <= 0 || bh <= 0) continue;
            boxes.add(new Rect2d(bx, by, bw, bh));
            scores.add(conf);
            classIds.add(bestCls);
        }
    }

    /**
     * Pure-Java greedy non-maximum suppression.
     * Boxes are visited in descending score order; any remaining box whose IoU
     * with a kept box exceeds {@code nmsThreshold} is suppressed.
     *
     * @return indices (into {@code boxes}) of the boxes to keep
     */
    private List<Integer> nmsIndices(List<Rect2d> boxes, List<Float> scores, float nmsThreshold) {
        List<Integer> order = new ArrayList<>(boxes.size());
        for (int i = 0; i < boxes.size(); i++) order.add(i);
        // Sort candidate indices by score, highest first.
        order.sort((i, j) -> Float.compare(scores.get(j), scores.get(i)));
        List<Integer> keep = new ArrayList<>();
        boolean[] removed = new boolean[boxes.size()];
        for (int a = 0; a < order.size(); a++) {
            int i = order.get(a);
            if (removed[i]) continue;
            keep.add(i);
            Rect2d bi = boxes.get(i);
            double areaI = bi.width() * bi.height();
            for (int b = a + 1; b < order.size(); b++) {
                int j = order.get(b);
                if (removed[j]) continue;
                Rect2d bj = boxes.get(j);
                double areaJ = bj.width() * bj.height();
                double xx1 = Math.max(bi.x(), bj.x());
                double yy1 = Math.max(bi.y(), bj.y());
                double xx2 = Math.min(bi.x() + bi.width(), bj.x() + bj.width());
                double yy2 = Math.min(bi.y() + bi.height(), bj.y() + bj.height());
                double w = Math.max(0, xx2 - xx1);
                double h = Math.max(0, yy2 - yy1);
                double inter = w * h;
                // Small epsilon guards against division by zero for degenerate boxes.
                double iou = inter / (areaI + areaJ - inter + 1e-9);
                if (iou > nmsThreshold) removed[j] = true;
            }
        }
        return keep;
    }

    @Override public void close(){ net.close(); }
}

View File

@@ -25,9 +25,15 @@ public final class OpenVinoYoloDetector implements YoloDetector {
this.input = new Size(inW, inH);
this.colorBGR = colorBGR;
String xml = dir.resolve("model.xml").toString();
String bin = dir.resolve("model.bin").toString();
// 自动查找模型文件
String xml = findModelFile(dir, ".xml");
String bin = findModelFile(dir, ".bin");
if (xml == null || bin == null) {
throw new Exception("找不到模型文件,请确保目录中存在 .xml 和 .bin 文件: " + dir);
}
// 读取类别文件
Path clsPath = dir.resolve("classes.txt");
if (Files.exists(clsPath)) {
this.classes = Files.readAllLines(clsPath).stream().map(String::trim)
@@ -36,19 +42,34 @@ public final class OpenVinoYoloDetector implements YoloDetector {
this.classes = new String[0];
}
this.net = readNetFromModelOptimizer(xml, bin);
boolean set = false;
if ("openvino".equalsIgnoreCase(backend)) {
try {
net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
net.setPreferableTarget(DNN_TARGET_CPU);
set = true;
} catch (Throwable ignore) { /* 回退 */ }
}
if (!set) {
try {
// 加载模型但强制使用OpenCV后端
this.net = readNetFromModelOptimizer(xml, bin);
// 强制使用OpenCV后端避免OpenVINO依赖
net.setPreferableBackend(DNN_BACKEND_OPENCV);
net.setPreferableTarget(DNN_TARGET_CPU);
System.out.println("模型加载成功: " + name + " (使用OpenCV后端)");
} catch (Exception e) {
throw new Exception("模型加载失败: " + e.getMessage() +
"\n请确保模型文件完整且格式正确", e);
}
}
/**
* 在目录中查找指定扩展名的模型文件
*/
private String findModelFile(Path dir, String extension) {
try {
return Files.list(dir)
.filter(path -> path.toString().toLowerCase().endsWith(extension.toLowerCase()))
.map(Path::toString)
.findFirst()
.orElse(null);
} catch (Exception e) {
return null;
}
}
@@ -227,4 +248,4 @@ public final class OpenVinoYoloDetector implements YoloDetector {
}
@Override public void close(){ net.close(); }
}
}

View File

@@ -1,4 +1,4 @@
[
{"name":"smoke","path":"libs/models/smoke","size":[640,640],"backend":"opencv"},
{"name":"garbage","path":"libs/models/garbage","size":[640,640],"backend":"opencv"}
]

View File

@@ -0,0 +1,33 @@
{
"models": [
{
"name": "yolo",
"type": "object_detection",
"modelPath": "models/yolo.onnx",
"configPath": "models/yolo.cfg",
"weightsPath": "models/yolo.weights",
"classNames": [
"person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck",
"boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench",
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra",
"giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
"skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
"skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup",
"fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange",
"broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "sofa",
"pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse",
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
"refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier",
"toothbrush"
],
"inputSize": {
"width": 640,
"height": 640
},
"threshold": 0.5,
"nmsThreshold": 0.4
}
],
"enabled": true,
"defaultModel": "yolo"
}