Files
rtsp-video-analysis-system/ruoyi-video/src/main/java/com/ruoyi/video/thread/MediaTransferFlvByJavacv.java

688 lines
27 KiB
Java
Raw Normal View History

2025-09-26 11:55:38 +08:00
package com.ruoyi.video.thread;
import com.ruoyi.video.common.ClientType;
import com.ruoyi.video.common.ModelManager;
import com.ruoyi.video.domain.Detection;
2025-09-26 11:55:38 +08:00
import com.ruoyi.video.domain.dto.CameraDto;
import com.ruoyi.video.service.MediaService;
import com.ruoyi.video.thread.detector.CompositeDetector;
import com.ruoyi.video.thread.detector.YoloDetector;
import com.ruoyi.video.utils.Overlay;
2025-09-26 11:55:38 +08:00
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import lombok.Data;
2025-09-26 17:18:13 +08:00
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.Mat;
import org.springframework.util.CollectionUtils;
2025-09-26 11:55:38 +08:00
import java.io.ByteArrayOutputStream;
2025-09-26 17:18:13 +08:00
import java.net.URL;
import java.util.List;
2025-09-26 11:55:38 +08:00
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
2025-09-26 17:18:13 +08:00
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
2025-09-26 11:55:38 +08:00
2025-09-26 17:18:13 +08:00
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
2025-09-26 11:55:38 +08:00
/**
* 推流(FLV) + JavaCV 解码/转码 + (可选)YOLO 检测叠框
* - 支持窗口巡检在给定秒数内启用推理与统计并通过 DetectionListener 回调让上层落库/告警
* - 播放开始可触发 10 秒试跑attachDetectionListener(jobId, deviceId, 10, listener)
*
* 依赖ModelManager / YoloDetector / CompositeDetector / Detection / Overlay / MediaService / CameraDto / ClientType
*
* @author orange
* @since 2025-01-16
2025-09-26 11:55:38 +08:00
*/
@Slf4j
public class MediaTransferFlvByJavacv extends MediaTransfer implements Runnable {
/* ===================== Internal callback / stats types (remove if defined externally) ===================== */
public interface DetectionListener {
    /** Called each time an inference pass produces detections (callers should throttle persistence). */
    void onDetections(Long jobId, Long deviceId, List<Detection> detections, long frameTsMs);
    /** Called once when an inspection "window" ends, carrying the accumulated statistics. */
    void onWindowFinished(Long jobId, Long deviceId, WindowStats stats);
}
/** Mutable accumulator for one inspection window; handed to listeners as a snapshot via cloneStats(). */
@Data
public static class WindowStats {
    private int frames;          // total frames observed in the window
    private int detectedFrames;  // frames that had at least one detection
    private int objects;         // total number of detected objects
    private double maxScore;     // highest confidence seen in the window
    private long startMs;        // window start, epoch millis
    private long endMs;          // window end, epoch millis
}
/* ===================== FFmpeg/JavaCV initialisation ===================== */
static {
    // Silence native FFmpeg except for real errors, and route its log through JavaCV.
    avutil.av_log_set_level(avutil.AV_LOG_ERROR);
    FFmpegLogCallback.set();
}
/* ===================== Core streaming state ===================== */
/** Connected WebSocket clients, keyed by Netty channel id. */
private final ConcurrentHashMap<String, ChannelHandlerContext> wsClients = new ConcurrentHashMap<>();
/** Connected HTTP-FLV clients, keyed by Netty channel id. */
private final ConcurrentHashMap<String, ChannelHandlerContext> httpClients = new ConcurrentHashMap<>();
// Main-loop flag; volatile because it is flipped from other threads (stop()/setRunning()).
private volatile boolean running = false;
private boolean grabberStatus = false;   // pull side started successfully
private boolean recorderStatus = false;  // push side started successfully
// Last logged client counts (change detection for the log line in hasClient()).
private int hcSize, wcSize = 0;
// Milliseconds accumulated with zero connected clients; drives the auto-close countdown.
private int noClient = 0;
// FLV stream header, captured once after recorder start and replayed to every new client.
private byte[] header = null;
// Recorder output sink; drained and fanned out to clients after each packet/frame.
private final ByteArrayOutputStream bos = new ByteArrayOutputStream();
private FFmpegFrameGrabber grabber;   // stream puller
private FFmpegFrameRecorder recorder; // FLV recorder / pusher
/** true: remux (packet copy); false: transcode. Detection forces transcode (boxes are drawn on pixels). */
private boolean transferFlag = false;
private final CameraDto cameraDto;
private Thread listenThread;          // once-per-second client watchdog
/* ===================== Inference state ===================== */
// External switch: detection enabled by default; may be toggled per task/page.
private boolean enableDetection = true;
private ModelManager modelManager;
private YoloDetector detector;
// Decode / inference / send threads are decoupled through these shared references.
private final OpenCVFrameConverter.ToMat toMat = new OpenCVFrameConverter.ToMat();
private final AtomicReference<Mat> latestFrame = new AtomicReference<>();
private final AtomicReference<List<Detection>> latestDetections =
        new AtomicReference<>(java.util.Collections.emptyList());
// Window-inspection control.
private volatile boolean windowMode = false;
private volatile long windowEndMs = 0L;
private Long currentJobId;
private Long currentDeviceId;
private DetectionListener detectionListener;
private final WindowStats stats = new WindowStats();
// Most recent frame with boxes drawn on it, exported for snapshots / evidence.
private final AtomicReference<Mat> latestAnnotatedFrame = new AtomicReference<>();
2025-09-26 11:55:38 +08:00
/**
 * @param cameraDto stream configuration: source URL, media key, auto-close policy, type
 */
public MediaTransferFlvByJavacv(CameraDto cameraDto) {
    super();
    this.cameraDto = cameraDto;
}
/**
 * Updates the running flag. A transition from true to false is treated as an
 * explicit shutdown request and triggers full resource teardown.
 */
public void setRunning(boolean running) {
    boolean wasRunning = this.running;
    this.running = running;
    if (wasRunning && !running) {
        try {
            // closeMedia() also ends any active window and disconnects all clients.
            closeMedia();
        } catch (Exception ignore) {
            // best-effort shutdown; nothing useful to do on failure
        }
    }
}
/** Preferred shutdown entry point: stops the loop and releases all resources. */
public void stop() {
    setRunning(false);
}
/* ===================== External control API ===================== */
public boolean isRunning() { return running; }
public boolean isGrabberStatus() { return grabberStatus; }
public boolean isRecorderStatus() { return recorderStatus; }
/** Toggles YOLO inference; when enabled the stream is always transcoded (see createTransterOrRecodeRecorder). */
public void setEnableDetection(boolean enable) { this.enableDetection = enable; }
/**
 * Starts an inspection "window": for {@code windowSeconds} every inference pass is
 * reported via {@link DetectionListener#onDetections}, and when the window ends
 * {@link DetectionListener#onWindowFinished} fires once with the accumulated stats.
 *
 * @param jobId         owning job id (passed back in callbacks)
 * @param deviceId      owning device id (passed back in callbacks)
 * @param windowSeconds window length; non-positive values are ignored
 * @param listener      callback sink; null is ignored
 */
public void attachDetectionListener(Long jobId, Long deviceId, int windowSeconds, DetectionListener listener) {
    if (windowSeconds <= 0 || listener == null) return;
    this.currentJobId = jobId;
    this.currentDeviceId = deviceId;
    this.detectionListener = listener;
    long now = System.currentTimeMillis();
    // Reset ALL window state (including endMs, which the original never cleared)
    // before opening the window.
    this.stats.setStartMs(now);
    this.stats.setEndMs(0L);
    this.stats.setFrames(0);
    this.stats.setDetectedFrames(0);
    this.stats.setObjects(0);
    this.stats.setMaxScore(0.0);
    this.windowEndMs = now + windowSeconds * 1000L;
    // Flip the volatile flag LAST: the inference/send threads key off windowMode,
    // so they must never observe it true with a stale deadline or stale counters.
    this.windowMode = true;
    log.info("[job:{} device:{}] window started {}s", jobId, deviceId, windowSeconds);
}
/** Forcefully ends the current inspection window (e.g. when the owning task is cancelled). No-op if none is active. */
public void stopWindowIfAny() {
    if (!windowMode) return;
    this.windowMode = false;
    stats.setEndMs(System.currentTimeMillis());
    if (detectionListener != null && currentJobId != null && currentDeviceId != null) {
        try {
            // Hand the listener a snapshot so subsequent resets cannot mutate what it received.
            detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats));
        } catch (Exception ignore) {
            // listener failures must not break the streaming pipeline
        }
    }
    currentJobId = null;
    currentDeviceId = null;
    detectionListener = null;
    log.info("window finished (stopWindowIfAny)");
}
/**
 * Returns a deep copy of the most recent annotated (boxes-drawn) frame for
 * snapshot / evidence purposes, or null if none is available yet.
 * The caller owns the returned Mat and must release it.
 */
public Mat getLatestAnnotatedFrameCopy() {
    final Mat current = latestAnnotatedFrame.get();
    if (current == null || current.empty()) {
        return null;
    }
    // clone() performs a full deep copy of the pixel buffer.
    return current.clone();
}
/* ===================== Inference initialisation ===================== */
/**
 * Loads model metadata from /models/models.json and builds the detector
 * (a CompositeDetector running two models in parallel), then attempts one
 * warm-up inference so the first streamed frames are not box-less.
 *
 * @throws Exception when model metadata or model loading fails
 */
private void initDetectors() throws Exception {
    if (!enableDetection) return;
    modelManager = new ModelManager();
    URL json = getClass().getResource("/models/models.json");
    modelManager.load(json);
    // Switch between a single model and parallel multi-model as needed:
    // detector = modelManager.get("person-helmet");
    detector = new CompositeDetector(
            "all-models",
            java.util.List.of(
                    modelManager.get("person-helmet"),
                    modelManager.get("vehicle-plate")
            ),
            2 // parallelism
    );
    log.info("YOLO detectors ready: {}", detector.name());
    // Warm-up once to avoid "no boxes" on the first few frames.
    // NOTE(review): transferStream2Flv() calls this method BEFORE createGrabber(),
    // so grabber is null here and the warm-up grab never actually runs — confirm
    // whether the intended call order is grabber-first.
    try {
        Frame warm = grabber != null ? grabber.grabImage() : null;
        if (warm != null) {
            Mat wm = toMat.convert(warm);
            if (wm != null && !wm.empty()) {
                long t0 = System.currentTimeMillis();
                List<Detection> dets = detector.detect(wm);
                long cost = System.currentTimeMillis() - t0;
                latestDetections.set(dets);
                log.info("Detector warm-up OK, cost={}ms, dets={}", cost,
                        CollectionUtils.isEmpty(dets) ? 0 : dets.size());
            }
        }
    } catch (Throwable e) {
        log.warn("Detector warm-up failed: {}", e.getMessage());
    }
}
/* ===================== Pull / push ===================== */
/**
 * Creates and starts the FFmpeg grabber for cameraDto.getUrl(), applying
 * low-latency options plus protocol-specific tweaks (RTSP / RTMP / desktop capture).
 * On failure the camera is removed from the MediaService registry.
 *
 * @return true when the grabber started successfully
 */
protected boolean createGrabber() {
    grabber = new FFmpegFrameGrabber(cameraDto.getUrl());
    String fiveSecUs = "5000000"; // 5 seconds, expressed in microseconds
    String oneMb = "1048576";     // 1 MiB
    grabber.setOption("threads", "1");
    grabber.setOption("buffer_size", oneMb);
    grabber.setOption("rw_timeout", fiveSecUs);
    grabber.setOption("stimeout", fiveSecUs);   // RTSP socket timeout
    grabber.setOption("probesize", "1048576");
    grabber.setOption("analyzeduration", fiveSecUs);
    grabber.setOption("fflags", "nobuffer");    // favour latency over smoothness
    grabber.setOption("flags", "low_delay");
    grabber.setOption("loglevel", "error");
    if (cameraDto.getUrl().toLowerCase().startsWith("rtsp://")) {
        // TCP transport avoids UDP packet-loss artefacts on unstable networks.
        grabber.setOption("rtsp_transport", "tcp");
        grabber.setOption("allowed_media_types", "video");
        grabber.setOption("max_delay", "500000");
        grabber.setOption("user_agent", "Lavf/60");
    } else if (cameraDto.getUrl().toLowerCase().startsWith("rtmp://")) {
        grabber.setOption("rtmp_buffer", "1000");
    } else if ("desktop".equalsIgnoreCase(cameraDto.getUrl())) {
        // Windows screen capture via gdigrab at a fixed 25 fps.
        grabber.setFormat("gdigrab");
        grabber.setOption("draw_mouse", "1");
        grabber.setNumBuffers(0);
        grabber.setOption("fflags", "nobuffer");
        grabber.setOption("framerate", "25");
        grabber.setFrameRate(25);
    }
    try {
        grabber.start();
        log.info("启动拉流器成功: {}", cameraDto.getUrl());
        return (grabberStatus = true);
    } catch (FrameGrabber.Exception e) {
        MediaService.cameras.remove(cameraDto.getMediaKey());
        log.error("启动拉流器失败: {} ({})", cameraDto.getUrl(), e.getMessage());
        return (grabberStatus = false);
    }
}
protected boolean createTransterOrRecodeRecorder() {
// 启用检测时必须转码(需要像素级叠框)
2025-09-26 17:18:13 +08:00
if (enableDetection) transferFlag = false;
2025-09-26 11:55:38 +08:00
recorder = new FFmpegFrameRecorder(bos, grabber.getImageWidth(), grabber.getImageHeight(),
grabber.getAudioChannels());
recorder.setFormat("flv");
2025-09-26 17:18:13 +08:00
2025-09-26 11:55:38 +08:00
if (!transferFlag) {
// 转码:低延迟 H.264
2025-09-26 11:55:38 +08:00
recorder.setInterleaved(false);
recorder.setVideoOption("tune", "zerolatency");
recorder.setVideoOption("preset", "ultrafast");
recorder.setVideoOption("crf", "26");
recorder.setVideoOption("threads", "1");
2025-09-26 17:18:13 +08:00
recorder.setFrameRate(25);
recorder.setGopSize(25);
2025-09-26 11:55:38 +08:00
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
recorder.setTrellis(1);
2025-09-26 17:18:13 +08:00
recorder.setMaxDelay(0);
2025-09-26 11:55:38 +08:00
try {
recorder.start();
return recorderStatus = true;
2025-09-26 17:18:13 +08:00
} catch (FrameRecorder.Exception e1) {
2025-09-26 11:55:38 +08:00
log.info("启动转码录制器失败", e1);
MediaService.cameras.remove(cameraDto.getMediaKey());
}
} else {
// 转复用(仅不叠框时)
2025-09-26 11:55:38 +08:00
recorder.setCloseOutputStream(false);
try {
recorder.start(grabber.getFormatContext());
return recorderStatus = true;
2025-09-26 17:18:13 +08:00
} catch (FrameRecorder.Exception e) {
log.warn("{} 启动转复用失败,自动切换转码", cameraDto.getUrl());
2025-09-26 11:55:38 +08:00
transferFlag = false;
2025-09-26 17:18:13 +08:00
try { recorder.stop(); } catch (FrameRecorder.Exception ignored) {}
2025-09-26 11:55:38 +08:00
if (createTransterOrRecodeRecorder()) {
log.error("{} 切换到转码模式", cameraDto.getUrl());
2025-09-26 11:55:38 +08:00
return true;
}
log.error("{} 切换转码模式失败", cameraDto.getUrl(), e);
2025-09-26 11:55:38 +08:00
}
}
return recorderStatus = false;
}
/**
 * Whether the input streams can be remuxed into FLV without transcoding:
 * camera type 0, FLV-compatible video (H.264/H.263 or desktop capture) and AAC audio.
 */
private boolean supportFlvFormatCodec() {
    if (cameraDto.getType() != 0) {
        return false;
    }
    final int videoCodec = grabber.getVideoCodec();
    final int audioCodec = grabber.getAudioCodec();
    boolean videoOk = "desktop".equals(cameraDto.getUrl())
            || videoCodec == avcodec.AV_CODEC_ID_H264
            || videoCodec == avcodec.AV_CODEC_ID_H263;
    boolean audioOk = audioCodec == avcodec.AV_CODEC_ID_AAC
            || audioCodec == avcodec.AV_CODEC_ID_AAC_LATM;
    return videoOk && audioOk;
}
/* ===================== Main pipeline ===================== */
/**
 * Full pipeline: init detectors (optional) -> start grabber -> start recorder ->
 * capture FLV header -> spawn decode/inference threads (when detecting) -> main
 * send loop (remux or transcode) -> teardown. Runs until stop()/closeMedia()
 * clears {@code running} or a component fails.
 */
protected void transferStream2Flv() {
    try {
        // NOTE(review): detectors are initialised before the grabber exists, which
        // makes the warm-up inside initDetectors() a no-op — confirm intended order.
        if (enableDetection) initDetectors();
    } catch (Exception e) {
        log.error("初始化检测模型失败:{}", e.getMessage(), e);
        enableDetection = false; // a model failure must not block streaming
    }
    if (!createGrabber()) return;
    // Remux is only considered when detection is off (detection needs pixels).
    if (!enableDetection) transferFlag = supportFlvFormatCodec();
    if (!createTransterOrRecodeRecorder()) return;
    try { grabber.flush(); } catch (FrameGrabber.Exception e) { log.debug("flush grabber fail", e); }
    // Recorder start wrote the FLV header into bos; capture it once for new clients.
    if (header == null) {
        header = bos.toByteArray();
        bos.reset();
    }
    running = true;
    listenClient();
    long startTime = 0;
    long videoTS;
    // === Decoupled threads: decode / inference ===
    Thread tDecode = null, tInfer = null;
    if (enableDetection) {
        // Decode thread: continuously publishes the freshest frame into latestFrame.
        tDecode = new Thread(() -> {
            while (running && grabberStatus) {
                try {
                    Frame f = grabber.grabImage();
                    if (f == null) continue;
                    Mat m = toMat.convert(f);
                    if (m == null || m.empty()) continue;
                    // Deep-copy: the converter's Mat is backed by the grabber's buffer.
                    Mat copy = new Mat(m.rows(), m.cols(), CV_8UC3);
                    m.copyTo(copy);
                    Mat old = latestFrame.getAndSet(copy);
                    if (old != null) old.release();
                } catch (Exception e) {
                    log.debug("decode err: {}", e.getMessage());
                }
            }
        }, "det-decode");
        // Inference thread: paced at inferFps, works on a private snapshot of the frame.
        int inferFps = 15;
        long period = 1_000_000_000L / inferFps;
        tInfer = new Thread(() -> {
            long next = System.nanoTime();
            while (running && grabberStatus) {
                long now = System.nanoTime();
                if (now < next) { LockSupport.parkNanos(next - now); continue; }
                next += period;
                Mat src = latestFrame.get();
                if (src == null || src.empty()) continue;
                Mat snap = new Mat();
                src.copyTo(snap);
                try {
                    List<Detection> dets = detector.detect(snap);
                    latestDetections.set(dets);
                    // During an inspection window: forward detections to the listener.
                    if (windowMode && detectionListener != null && currentJobId != null && currentDeviceId != null) {
                        long ts = System.currentTimeMillis();
                        try { detectionListener.onDetections(currentJobId, currentDeviceId, dets, ts); }
                        catch (Exception ignore) {}
                    }
                } catch (Throwable e) {
                    log.debug("infer err: {}", e.getMessage());
                } finally {
                    snap.release();
                }
            }
        }, "det-infer");
        tDecode.start();
        tInfer.start();
    }
    // === Main send loop ===
    for (; running && grabberStatus && recorderStatus; ) {
        try {
            if (transferFlag) {
                // Remux only (no overlay): copy packets through.
                long startGrab = System.currentTimeMillis();
                AVPacket pkt = grabber.grabPacket();
                // A grab that stalls > 5s is treated as a dead network connection.
                if ((System.currentTimeMillis() - startGrab) > 5000) {
                    log.info("{} 网络异常(复用)", cameraDto.getUrl());
                    closeMedia();
                    break;
                }
                if (pkt != null && !pkt.isNull()) {
                    if (startTime == 0) startTime = System.currentTimeMillis();
                    // Wall-clock based timestamp (microseconds), kept monotonic for the recorder.
                    videoTS = 1000 * (System.currentTimeMillis() - startTime);
                    if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
                    recorder.recordPacket(pkt);
                }
            } else {
                // Transcode path (overlay possible).
                long startGrab = System.currentTimeMillis();
                Frame frame;
                if (enableDetection) {
                    Mat src = latestFrame.get();
                    if (src == null || src.empty()) continue;
                    // Draw the most recent detection results onto the frame.
                    List<Detection> dets = latestDetections.get();
                    if (!CollectionUtils.isEmpty(dets)) {
                        Overlay.draw(dets, src);
                    }
                    // Publish the annotated frame for snapshot/evidence export.
                    updateLatestAnnotated(src);
                    // Accumulate stats only while a window is open.
                    if (windowMode) updateStats(dets);
                    // Window deadline check.
                    if (windowMode && System.currentTimeMillis() >= windowEndMs) {
                        finishWindow();
                    }
                    frame = toMat.convert(src);
                } else {
                    frame = grabber.grab();
                }
                if ((System.currentTimeMillis() - startGrab) > 5000) {
                    log.info("{} 网络异常(转码)", cameraDto.getUrl());
                    closeMedia();
                    break;
                }
                if (frame != null) {
                    long now = System.currentTimeMillis();
                    if (startTime == 0) startTime = now;
                    videoTS = 1000 * (now - startTime);
                    if (videoTS > recorder.getTimestamp()) recorder.setTimestamp(videoTS);
                    recorder.record(frame);
                }
            }
        } catch (FrameGrabber.Exception e) {
            grabberStatus = false;
            MediaService.cameras.remove(cameraDto.getMediaKey());
        } catch (FrameRecorder.Exception e) {
            recorderStatus = false;
            MediaService.cameras.remove(cameraDto.getMediaKey());
        }
        // Drain the recorder's buffered output to every connected client.
        if (bos.size() > 0) {
            byte[] b = bos.toByteArray();
            bos.reset();
            sendFrameData(b);
        }
    }
    // === Teardown ===
    try {
        if (detector != null) try { detector.close(); } catch (Exception ignored) {}
        if (modelManager != null) try { modelManager.close(); } catch (Exception ignored) {}
        if (recorder != null) recorder.close();
        if (grabber != null) grabber.close();
        bos.close();
    } catch (Exception ignored) {
    } finally {
        // Release the shared native frames so nothing leaks after shutdown.
        Mat m = latestFrame.getAndSet(null);
        if (m != null) m.release();
        Mat a = latestAnnotatedFrame.getAndSet(null);
        if (a != null) a.release();
        closeMedia();
    }
    log.info("关闭媒体流-javacv: {}", cameraDto.getUrl());
}
/* ===================== Statistics / window end ===================== */
/**
 * Folds one frame's detection results into the current window statistics:
 * frame count always increases; detection counters and the max confidence
 * only when the frame produced at least one detection.
 */
private void updateStats(List<Detection> dets) {
    stats.setFrames(stats.getFrames() + 1);
    if (dets == null || dets.isEmpty()) {
        return;
    }
    stats.setDetectedFrames(stats.getDetectedFrames() + 1);
    stats.setObjects(stats.getObjects() + dets.size());
    for (Detection detection : dets) {
        double score = detection.conf();
        if (score > stats.getMaxScore()) {
            stats.setMaxScore(score);
        }
    }
}
/** Ends the window on deadline expiry: snapshot stats, notify the listener, clear window state. */
private void finishWindow() {
    windowMode = false;
    stats.setEndMs(System.currentTimeMillis());
    if (detectionListener != null && currentJobId != null && currentDeviceId != null) {
        // Snapshot so later resets cannot mutate what the listener received.
        try { detectionListener.onWindowFinished(currentJobId, currentDeviceId, cloneStats(stats)); }
        catch (Exception ignore) {}
    }
    currentJobId = null;
    currentDeviceId = null;
    detectionListener = null;
    log.info("window finished (timeout)");
}
/** Defensive snapshot of the mutable stats so listeners never observe later mutation. */
private static WindowStats cloneStats(WindowStats source) {
    WindowStats snapshot = new WindowStats();
    snapshot.setStartMs(source.getStartMs());
    snapshot.setEndMs(source.getEndMs());
    snapshot.setFrames(source.getFrames());
    snapshot.setDetectedFrames(source.getDetectedFrames());
    snapshot.setObjects(source.getObjects());
    snapshot.setMaxScore(source.getMaxScore());
    return snapshot;
}
private void updateLatestAnnotated(Mat src) {
if (src == null || src.empty()) return;
Mat copy = new Mat(src.rows(), src.cols(), src.type());
src.copyTo(copy);
Mat old = latestAnnotatedFrame.getAndSet(copy);
if (old != null) old.release();
2025-09-26 11:55:38 +08:00
}
/* ===================== 网络发送/连接管理 ===================== */
2025-09-26 11:55:38 +08:00
private void sendFrameData(byte[] data) {
// ws
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
try {
if (entry.getValue().channel().isWritable()) {
entry.getValue().writeAndFlush(new BinaryWebSocketFrame(Unpooled.copiedBuffer(data)));
} else {
wsClients.remove(entry.getKey());
hasClient();
}
2025-09-26 17:18:13 +08:00
} catch (Exception e) {
2025-09-26 11:55:38 +08:00
wsClients.remove(entry.getKey());
hasClient();
log.debug("ws send err", e);
2025-09-26 11:55:38 +08:00
}
}
// http
for (Map.Entry<String, ChannelHandlerContext> entry : httpClients.entrySet()) {
try {
if (entry.getValue().channel().isWritable()) {
entry.getValue().writeAndFlush(Unpooled.copiedBuffer(data));
} else {
httpClients.remove(entry.getKey());
hasClient();
}
2025-09-26 17:18:13 +08:00
} catch (Exception e) {
2025-09-26 11:55:38 +08:00
httpClients.remove(entry.getKey());
hasClient();
log.debug("http send err", e);
2025-09-26 11:55:38 +08:00
}
}
}
/**
 * Re-evaluates the connected-client counts, logs changes, and drives the
 * no-client auto-close countdown when cameraDto.isAutoClose() is set.
 * NOTE(review): noClient += 1000 assumes this runs on the 1-second cadence of
 * listenClient(); it is also invoked from sendFrameData() on client removal,
 * which can advance the countdown slightly faster — confirm acceptable.
 */
public void hasClient() {
    int newHcSize = httpClients.size();
    int newWcSize = wsClients.size();
    // Log only when the counts actually changed.
    if (hcSize != newHcSize || wcSize != newWcSize) {
        hcSize = newHcSize;
        wcSize = newWcSize;
        log.info("{} http连接数{}, ws连接数{}", cameraDto.getUrl(), newHcSize, newWcSize);
    }
    if (!cameraDto.isAutoClose()) return;
    if (httpClients.isEmpty() && wsClients.isEmpty()) {
        // Close once the no-client grace period is exceeded.
        if (noClient > cameraDto.getNoClientsDuration()) {
            closeMedia();
        } else {
            noClient += 1000;
        }
    } else {
        noClient = 0;
    }
}
public void listenClient() {
2025-09-26 17:18:13 +08:00
listenThread = new Thread(() -> {
while (running) {
hasClient();
try { Thread.sleep(1000); } catch (InterruptedException ignored) {}
2025-09-26 11:55:38 +08:00
}
});
listenThread.start();
}
private void closeMedia() {
// 结束窗口(如果还在)
stopWindowIfAny();
2025-09-26 11:55:38 +08:00
running = false;
MediaService.cameras.remove(cameraDto.getMediaKey());
for (Map.Entry<String, ChannelHandlerContext> entry : wsClients.entrySet()) {
try { entry.getValue().close(); } catch (Exception ignored) {} finally { wsClients.remove(entry.getKey()); }
2025-09-26 11:55:38 +08:00
}
for (Map.Entry<String, ChannelHandlerContext> entry : httpClients.entrySet()) {
try { entry.getValue().close(); } catch (Exception ignored) {} finally { httpClients.remove(entry.getKey()); }
2025-09-26 11:55:38 +08:00
}
}
public void addClient(ChannelHandlerContext ctx, ClientType ctype) {
int timeout = 0;
while (true) {
try {
if (header != null) {
try {
if (ctx.channel().isWritable()) {
if (ClientType.HTTP.getType() == ctype.getType()) {
ChannelFuture future = ctx.writeAndFlush(Unpooled.copiedBuffer(header));
future.addListener(new GenericFutureListener<Future<? super Void>>() {
@Override
2025-09-26 17:18:13 +08:00
public void operationComplete(Future<? super Void> future) {
2025-09-26 11:55:38 +08:00
if (future.isSuccess()) {
httpClients.put(ctx.channel().id().toString(), ctx);
}
}
});
} else if (ClientType.WEBSOCKET.getType() == ctype.getType()) {
2025-09-26 17:18:13 +08:00
ChannelFuture future = ctx.writeAndFlush(
new BinaryWebSocketFrame(Unpooled.copiedBuffer(header)));
2025-09-26 11:55:38 +08:00
future.addListener(new GenericFutureListener<Future<? super Void>>() {
@Override
2025-09-26 17:18:13 +08:00
public void operationComplete(Future<? super Void> future) {
2025-09-26 11:55:38 +08:00
if (future.isSuccess()) {
wsClients.put(ctx.channel().id().toString(), ctx);
}
}
});
}
}
2025-09-26 17:18:13 +08:00
} catch (Exception e) {
log.debug("send header err", e);
2025-09-26 11:55:38 +08:00
}
break;
}
Thread.sleep(50);
timeout += 50;
2025-09-26 17:18:13 +08:00
if (timeout > 30000) break;
} catch (Exception e) {
log.debug("addClient err", e);
2025-09-26 11:55:38 +08:00
}
}
}
/** Thread entry point: runs the full pull/transcode/push pipeline until stopped. */
@Override
public void run() {
    transferStream2Flv();
}
2025-09-26 11:55:38 +08:00
}