}
try {
String content = msg.toString();
- if (content.startsWith("--myboundary")) {
+ if (content.startsWith("--myboundary") || content.startsWith("-- myboundary")) {
processEvent(content);
return;
}
}
}
if (contentType.contains("multipart")) {
+ boundary = Helper.searchString(contentType, "boundary=");
if (mjpegUri.equals(requestUrl)) {
if (msg instanceof HttpMessage) {
// very start of stream only
mjpegContentType = contentType;
CameraServlet localServlet = servlet;
if (localServlet != null) {
- localServlet.openStreams.updateContentType(contentType);
+ logger.debug("Setting Content-Type to:{}", contentType);
+ localServlet.openStreams.updateContentType(contentType, boundary);
}
}
- } else {
- boundary = Helper.searchString(contentType, "boundary=");
}
} else if (contentType.contains("image/jp")) {
if (bytesToRecieve == 0) {
}
public void openCamerasStream() {
+ if (mjpegUri.isEmpty() || "ffmpeg".equals(mjpegUri)) {
+ setupFfmpegFormat(FFmpegFormat.MJPEG);
+ return;
+ }
closeChannel(getTinyUrl(mjpegUri));
- mainEventLoopGroup.schedule(this::openMjpegStream, 0, TimeUnit.MILLISECONDS);
+ // Dahua cameras crash if you refresh (close and open) the stream without this delay.
+ mainEventLoopGroup.schedule(this::openMjpegStream, 300, TimeUnit.MILLISECONDS);
}
private void openMjpegStream() {
pollCameraJob = threadPool.scheduleWithFixedDelay(this::pollCameraRunnable, 1000, 8000, TimeUnit.MILLISECONDS);
+ // Auto-restart the MJPEG stream now that the camera is back online.
+ CameraServlet localServlet = servlet;
+ if (localServlet != null && !localServlet.openStreams.isEmpty()) {
+ openCamerasStream();
+ }
+
if (!rtspUri.isEmpty()) {
updateState(CHANNEL_RTSP_URL, new StringType(rtspUri));
}
}
void pollingCameraConnection() {
+ keepMjpegRunning();
if (thing.getThingTypeUID().getId().equals(GENERIC_THING)) {
if (rtspUri.isEmpty()) {
logger.warn("Binding has not been supplied with a FFmpeg Input URL, so some features will not work.");
// Only use ONVIF events if it is not an API camera.
onvifCamera.connect(thing.getThingTypeUID().getId().equals(ONVIF_THING));
}
- cameraConnectionJob = threadPool.scheduleWithFixedDelay(this::pollingCameraConnection, 4, 30, TimeUnit.SECONDS);
+ cameraConnectionJob = threadPool.scheduleWithFixedDelay(this::pollingCameraConnection, 4, 8, TimeUnit.SECONDS);
+ }
+
+ private void keepMjpegRunning() {
+ CameraServlet localServlet = servlet;
+ if (localServlet != null && !localServlet.openStreams.isEmpty()) {
+ if (!mjpegUri.isEmpty() && !"ffmpeg".equals(mjpegUri)) {
+ localServlet.openStreams.queueFrame(("--" + localServlet.openStreams.boundary + "\r\n\r\n").getBytes());
+ }
+ localServlet.openStreams.queueFrame(getSnapshot());
+ }
}
// What the camera needs to re-connect if the initialize() is not called.
}
} while (true);
case "/ipcamera.mjpeg":
- if (handler.mjpegUri.isEmpty() || "ffmpeg".equals(handler.mjpegUri)) {
- if (openStreams.isEmpty()) {
- handler.setupFfmpegFormat(FFmpegFormat.MJPEG);
- }
- output = new StreamOutput(resp);
- openStreams.addStream(output);
- } else if (openStreams.isEmpty()) {
+ if (openStreams.isEmpty()) {
logger.debug("First stream requested, opening up stream from camera");
handler.openCamerasStream();
- output = new StreamOutput(resp, handler.mjpegContentType);
- openStreams.addStream(output);
+ if (handler.mjpegUri.isEmpty() || "ffmpeg".equals(handler.mjpegUri)) {
+ output = new StreamOutput(resp);
+ } else {
+ output = new StreamOutput(resp, handler.mjpegContentType);
+ }
} else {
- ChannelTracking tracker = handler.channelTrackingMap.get(handler.mjpegUri);
- if (tracker == null || !tracker.getChannel().isOpen()) {
- logger.debug("Not the first stream requested but the stream from camera was closed");
- handler.openCamerasStream();
- openStreams.closeAllStreams();
+ if (handler.mjpegUri.isEmpty() || "ffmpeg".equals(handler.mjpegUri)) {
+ output = new StreamOutput(resp);
+ } else {
+ ChannelTracking tracker = handler.channelTrackingMap.get(handler.mjpegUri);
+ if (tracker == null || !tracker.getChannel().isOpen()) {
+ logger.debug("Not the first stream requested but the stream from camera was closed");
+ handler.openCamerasStream();
+ openStreams.closeAllStreams();
+ }
+ output = new StreamOutput(resp, handler.mjpegContentType);
}
- output = new StreamOutput(resp, handler.mjpegContentType);
- openStreams.addStream(output);
}
+ openStreams.addStream(output);
do {
try {
output.sendFrame();
@NonNullByDefault
public class OpenStreams {
private List<StreamOutput> openStreams = Collections.synchronizedList(new ArrayList<StreamOutput>());
+ public String boundary = "thisMjpegStream";
public synchronized void addStream(StreamOutput stream) {
openStreams.add(stream);
return openStreams.isEmpty();
}
- public synchronized void updateContentType(String contentType) {
+ public synchronized void updateContentType(String contentType, String boundary) {
+ this.boundary = boundary;
for (StreamOutput stream : openStreams) {
stream.updateContentType(contentType);
}
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jdt.annotation.NonNullByDefault;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The {@link StreamOutput} Streams mjpeg out to a client
@NonNullByDefault
public class StreamOutput {
+ public final Logger logger = LoggerFactory.getLogger(getClass());
private final HttpServletResponse response;
private final String boundary;
private String contentType;
private final ServletOutputStream output;
- private BlockingQueue<byte[]> fifo = new ArrayBlockingQueue<byte[]>(6);
+ private BlockingQueue<byte[]> fifo = new ArrayBlockingQueue<byte[]>(30);
private boolean connected = false;
public boolean isSnapshotBased = false;
try {
fifo.add(frame);
} catch (IllegalStateException e) {
+ logger.debug("FIFO buffer has run out of space:{}", e.getMessage());
fifo.remove();
fifo.add(frame);
}