/**
* The {@link Ffmpeg} class is responsible for handling multiple ffmpeg conversions which are used for many tasks
*
- *
* @author Matthew Skinner - Initial contribution
*/
public void setKeepAlive(int numberOfEightSeconds) {
// We poll every 8 seconds due to mjpeg stream requirement.
if (keepAlive == -1 && numberOfEightSeconds > 1) {
- return;// When set to -1 this will not auto turn off stream.
+ return; // When set to -1 this will not auto turn off stream.
}
keepAlive = numberOfEightSeconds;
}
public void checkKeepAlive() {
if (keepAlive == 1) {
stopConverting();
- } else if (keepAlive <= -1 && !getIsAlive()) {
+ } else if (keepAlive <= -1 && !isAlive()) {
logger.warn("HLS stream was not running, restarting it now.");
startConverting();
}
public void run() {
try {
process = Runtime.getRuntime().exec(commandArrayList.toArray(new String[commandArrayList.size()]));
- Process localProcess = process;
- if (localProcess != null) {
- InputStream errorStream = localProcess.getErrorStream();
- InputStreamReader errorStreamReader = new InputStreamReader(errorStream);
- BufferedReader bufferedReader = new BufferedReader(errorStreamReader);
- String line = null;
- while ((line = bufferedReader.readLine()) != null) {
- logger.trace("{}", line);
- switch (format) {
- case RTSP_ALARMS:
- if (line.contains("lavfi.")) {
- // When the number of pixels that change are below the noise floor we need to look
- // across frames to confirm it is motion and not noise.
- if (countOfMotions < 10) {// Stop increasing otherwise it takes too long to go OFF
- countOfMotions++;
+
+ InputStream errorStream = process.getErrorStream();
+ InputStreamReader errorStreamReader = new InputStreamReader(errorStream);
+ BufferedReader bufferedReader = new BufferedReader(errorStreamReader);
+ String line = null;
+ while ((line = bufferedReader.readLine()) != null) {
+ logger.trace("{}", line);
+ switch (format) {
+ case RTSP_ALARMS:
+ if (line.contains("lavfi.")) {
+ // When the number of pixels that change are below the noise floor we need to look
+ // across frames to confirm it is motion and not noise.
+ if (countOfMotions < 10) { // Stop increasing otherwise it takes too long to go OFF
+ countOfMotions++;
+ }
+ if (countOfMotions > 9) {
+ ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ } else if (countOfMotions > 4 && ipCameraHandler.motionThreshold.intValue() > 10) {
+ ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ } else if (countOfMotions > 3 && ipCameraHandler.motionThreshold.intValue() > 15) {
+ ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ } else if (countOfMotions > 2 && ipCameraHandler.motionThreshold.intValue() > 30) {
+ ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ } else if (countOfMotions > 0 && ipCameraHandler.motionThreshold.intValue() > 89) {
+ ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ countOfMotions = 4; // Used to debounce the Alarm.
+ }
+ } else if (line.contains("speed=")) {
+ if (countOfMotions > 0) {
+ if (ipCameraHandler.motionThreshold.intValue() > 89) {
+ countOfMotions--;
}
- if (countOfMotions > 9) {
- ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- } else if (countOfMotions > 4 && ipCameraHandler.motionThreshold.intValue() > 10) {
- ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- } else if (countOfMotions > 3 && ipCameraHandler.motionThreshold.intValue() > 15) {
- ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- } else if (countOfMotions > 2 && ipCameraHandler.motionThreshold.intValue() > 30) {
- ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- } else if (countOfMotions > 0 && ipCameraHandler.motionThreshold.intValue() > 89) {
- ipCameraHandler.motionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- countOfMotions = 4;// Used to debounce the Alarm.
+ if (ipCameraHandler.motionThreshold.intValue() > 10) {
+ countOfMotions -= 2;
+ } else {
+ countOfMotions -= 4;
}
- } else if (line.contains("speed=")) {
- if (countOfMotions > 0) {
- if (ipCameraHandler.motionThreshold.intValue() > 89) {
- countOfMotions--;
- }
- if (ipCameraHandler.motionThreshold.intValue() > 10) {
- countOfMotions -= 2;
- } else {
- countOfMotions -= 4;
- }
- if (countOfMotions <= 0) {
- ipCameraHandler.noMotionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
- countOfMotions = 0;
- }
+ if (countOfMotions <= 0) {
+ ipCameraHandler.noMotionDetected(CHANNEL_FFMPEG_MOTION_ALARM);
+ countOfMotions = 0;
}
- } else if (line.contains("silence_start")) {
- ipCameraHandler.noAudioDetected();
- } else if (line.contains("silence_end")) {
- ipCameraHandler.audioDetected();
}
- case MJPEG:
- case SNAPSHOT:
- notFrozen = true;// RTSP_ALARMS, MJPEG and SNAPSHOT all set this to true, no break.
- break;
- default:
- break;
- }
+ } else if (line.contains("silence_start")) {
+ ipCameraHandler.noAudioDetected();
+ } else if (line.contains("silence_end")) {
+ ipCameraHandler.audioDetected();
+ }
+ case MJPEG:
+ case SNAPSHOT:
+ notFrozen = true; // RTSP_ALARMS, MJPEG and SNAPSHOT all set this to true, no break.
+ break;
+ default:
+ break;
}
}
} catch (IOException e) {
- logger.warn("An IO error occured trying to start FFmpeg:{}", e.getMessage());
+ logger.warn("An IO error occurred trying to start FFmpeg: {}", e.getMessage());
} finally {
switch (format) {
case GIF:
if (!ipCameraFfmpegThread.isAlive()) {
ipCameraFfmpegThread = new IpCameraFfmpegThread();
if (!password.isEmpty()) {
- logger.debug("Starting ffmpeg with this command now:{}",
+ logger.debug("Starting ffmpeg with this command now: {}",
ffmpegCommand.replaceAll(password, "********"));
} else {
- logger.debug("Starting ffmpeg with this command now:{}", ffmpegCommand);
+ logger.debug("Starting ffmpeg with this command now: {}", ffmpegCommand);
}
ipCameraFfmpegThread.start();
if (format.equals(FFmpegFormat.HLS)) {
}
}
- public boolean getIsAlive() {
+ public boolean isAlive() {
Process localProcess = process;
if (localProcess != null) {
if (localProcess.isAlive() && notFrozen) {
public void stopConverting() {
if (ipCameraFfmpegThread.isAlive()) {
- logger.debug("Stopping ffmpeg {} now when keepalive is:{}", format, keepAlive);
+ logger.debug("Stopping ffmpeg {} now when keepalive is: {}", format, keepAlive);
Process localProcess = process;
if (localProcess != null) {
localProcess.destroyForcibly();
ipCameraHandler.logger.debug("The GetAiStateResponse could not be parsed");
return;
}
- if (aiResponse[0].value.dog_cat != null) {
- if (aiResponse[0].value.dog_cat.alarm_state == 1) {
- ipCameraHandler.setChannelState(CHANNEL_ANIMAL_ALARM, OnOffType.ON);
- } else {
- ipCameraHandler.setChannelState(CHANNEL_ANIMAL_ALARM, OnOffType.OFF);
- }
+ if (aiResponse[0].value.dog_cat.alarm_state == 1) {
+ ipCameraHandler.setChannelState(CHANNEL_ANIMAL_ALARM, OnOffType.ON);
+ } else {
+ ipCameraHandler.setChannelState(CHANNEL_ANIMAL_ALARM, OnOffType.OFF);
}
if (aiResponse[0].value.face.alarm_state == 1) {
ipCameraHandler.setChannelState(CHANNEL_FACE_DETECTED, OnOffType.ON);
*
* @author Matthew Skinner - Initial contribution
*/
-
@NonNullByDefault
public class IpCameraGroupHandler extends BaseThingHandler {
private final Logger logger = LoggerFactory.getLogger(getClass());
*
* @author Matthew Skinner - Initial contribution
*/
-
@NonNullByDefault
public class IpCameraHandler extends BaseThingHandler {
public final Logger logger = LoggerFactory.getLogger(getClass());
mjpegContentType = contentType;
CameraServlet localServlet = servlet;
if (localServlet != null) {
- logger.debug("Setting Content-Type to:{}", contentType);
+ logger.debug("Setting Content-Type to: {}", contentType);
localServlet.openStreams.updateContentType(contentType, boundary);
}
}
return; // don't auto close this as it is for the alarms.
}
}
- logger.debug("Closing an idle channel for camera:{}", cameraConfig.getIp());
+ logger.debug("Closing an idle channel for camera: {}", cameraConfig.getIp());
ctx.close();
}
}
}
private void checkCameraConnection() {
- if (snapshotPolling) {// Currently polling a real URL for snapshots, so camera must be online.
+ if (snapshotPolling) { // Currently polling a real URL for snapshots, so camera must be online.
return;
- } else if (ffmpegSnapshotGeneration) {// Use RTSP stream creating snapshots to know camera is online.
+ } else if (ffmpegSnapshotGeneration) { // Use RTSP stream creating snapshots to know camera is online.
Ffmpeg localSnapshot = ffmpegSnapshot;
- if (localSnapshot != null && !localSnapshot.getIsAlive()) {
- cameraCommunicationError("FFmpeg Snapshots Stopped: Check your camera can be reached.");
+ if (localSnapshot != null && !localSnapshot.isAlive()) {
+ cameraCommunicationError("FFmpeg Snapshots Stopped: Check that your camera can be reached.");
return;
}
- return;// ffmpeg snapshot stream is still alive
+ return; // ffmpeg snapshot stream is still alive
}
// if ONVIF cam also use connection state which is updated by regular messages to camera
if (!basicAuth.isEmpty()) {
if (useDigestAuth) {
- logger.warn("Camera at IP:{} had both Basic and Digest set to be used", cameraConfig.getIp());
+ logger.warn("Camera at IP: {} had both Basic and Digest set to be used", cameraConfig.getIp());
setBasicAuth(false);
} else {
request.headers().set("Authorization", "Basic " + basicAuth);
channelTrackingMap.remove(channelTracking.getRequestUrl());
}
if (channelTracking.getChannel().equals(channel)) {
- logger.debug("Open channel to camera is used for URL:{}", channelTracking.getRequestUrl());
+ logger.debug("Open channel to camera is used for URL: {}", channelTracking.getRequestUrl());
oldChannel = false;
}
}
ffmpegRecord = new Ffmpeg(this, format, cameraConfig.getFfmpegLocation(), inputOptions, rtspUri,
cameraConfig.getMp4OutOptions(), cameraConfig.getFfmpegOutput() + mp4Filename + ".mp4",
cameraConfig.getUser(), cameraConfig.getPassword());
- Ffmpeg localRecord = ffmpegRecord;
- if (localRecord != null) {
- localRecord.startConverting();
- if (mp4History.isEmpty()) {
- mp4History = mp4Filename;
- } else if (!"ipcamera".equals(mp4Filename)) {
- mp4History = mp4Filename + "," + mp4History;
- if (mp4HistoryLength > 49) {
- int endIndex = mp4History.lastIndexOf(",");
- mp4History = mp4History.substring(0, endIndex);
- }
+ ffmpegRecord.startConverting();
+ if (mp4History.isEmpty()) {
+ mp4History = mp4Filename;
+ } else if (!"ipcamera".equals(mp4Filename)) {
+ mp4History = mp4Filename + "," + mp4History;
+ if (mp4HistoryLength > 49) {
+ int endIndex = mp4History.lastIndexOf(",");
+ mp4History = mp4History.substring(0, endIndex);
}
}
setChannelState(CHANNEL_MP4_HISTORY, new StringType(mp4History));
}
ffmpegRtspHelper = new Ffmpeg(this, format, cameraConfig.getFfmpegLocation(), inputOptions, input,
filterOptions, "-f null -", cameraConfig.getUser(), cameraConfig.getPassword());
- localAlarms = ffmpegRtspHelper;
- if (localAlarms != null) {
- localAlarms.startConverting();
- }
+ ffmpegRtspHelper.startConverting();
break;
case MJPEG:
if (ffmpegMjpeg == null) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR,
"Camera refused connection on ONVIF ports.");
}
- logger.debug("About to connect to the IP Camera using the ONVIF PORT at IP:{}:{}", cameraConfig.getIp(),
+ logger.debug("About to connect to the IP Camera using the ONVIF PORT at IP: {}:{}", cameraConfig.getIp(),
cameraConfig.getOnvifPort());
onvifCamera.connect(thing.getThingTypeUID().getId().equals(ONVIF_THING));
return;
}
public void cameraConfigError(String reason) {
- // wont try to reconnect again due to a config error being the cause.
+ // won't try to reconnect again due to a config error being the cause.
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR, reason);
dispose();
}
public void cameraCommunicationError(String reason) {
// will try to reconnect again as camera may be rebooting.
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR, reason);
- if (isOnline) {// if already offline dont try reconnecting in 6 seconds, we want 30sec wait.
+ if (isOnline) { // if already offline don't try reconnecting in 6 seconds, we want 30sec wait.
resetAndRetryConnecting();
}
}
}
if (ffmpegMotionAlarmEnabled || ffmpegAudioAlarmEnabled) {
localFfmpeg = ffmpegRtspHelper;
- if (localFfmpeg == null || !localFfmpeg.getIsAlive()) {
+ if (localFfmpeg == null || !localFfmpeg.isAlive()) {
setupFfmpegFormat(FFmpegFormat.RTSP_ALARMS);
}
}
// check if the thread has frozen due to camera doing a soft reboot
localFfmpeg = ffmpegMjpeg;
- if (localFfmpeg != null && !localFfmpeg.getIsAlive()) {
+ if (localFfmpeg != null && !localFfmpeg.isAlive()) {
logger.debug("MJPEG was not being produced by FFmpeg when it should have been, restarting FFmpeg.");
setupFfmpegFormat(FFmpegFormat.MJPEG);
}
localFfmpeg.stopConverting();
ffmpegSnapshot = null;
}
- if (!thing.getThingTypeUID().getId().equals(GENERIC_THING)) {// generic cameras do not have ONVIF support
+ if (!thing.getThingTypeUID().getId().equals(GENERIC_THING)) { // generic cameras do not have ONVIF support
onvifCamera.disconnect();
}
openChannels.close();
/**
* The {@link OnvifCodec} is used by Netty to decode Onvif traffic into message Strings.
*
- *
* @author Matthew Skinner - Initial contribution
*/
@NonNullByDefault
}
if (evt instanceof IdleStateEvent) {
IdleStateEvent e = (IdleStateEvent) evt;
- logger.trace("IdleStateEvent received {}", e.state());
+ logger.trace("IdleStateEvent received: {}", e.state());
onvifConnection.setIsConnected(false);
ctx.close();
} else {
- logger.trace("Other ONVIF netty channel event occured {}", evt);
+ logger.trace("Other ONVIF netty channel event occurred: {}", evt);
}
}
/**
* The {@link OnvifConnection} This is a basic Netty implementation for connecting and communicating to ONVIF cameras.
*
- *
- *
* @author Matthew Skinner - Initial contribution
*/
private Float tiltRangeMax = 1.0f;
private Float zoomMin = 0.0f;
private Float zoomMax = 1.0f;
- // These hold the PTZ values for updating Openhabs controls in 0-100 range
+ // These hold the PTZ values for updating openHAB's controls in 0-100 range
private Float currentPanPercentage = 0.0f;
private Float currentTiltPercentage = 0.0f;
private Float currentZoomPercentage = 0.0f;
}
public void processReply(String message) {
- logger.trace("Onvif reply is:{}", message);
+ logger.trace("ONVIF reply is: {}", message);
if (message.contains("PullMessagesResponse")) {
eventRecieved(message);
} else if (message.contains("RenewResponse")) {
setIsConnected(true);
sendOnvifRequest(RequestType.GetCapabilities, deviceXAddr);
parseDateAndTime(message);
- logger.debug("Openhabs UTC dateTime is:{}", getUTCdateTime());
+ logger.debug("openHAB UTC dateTime is: {}", getUTCdateTime());
} else if (message.contains("GetCapabilitiesResponse")) { // 2nd to be sent.
parseXAddr(message);
sendOnvifRequest(RequestType.GetProfiles, mediaXAddr);
logger.debug("ptzNodeToken={}", ptzNodeToken);
sendPTZRequest(RequestType.GetConfigurations);
} else if (message.contains("GetDeviceInformationResponse")) {
- logger.debug("GetDeviceInformationResponse recieved");
+ logger.debug("GetDeviceInformationResponse received");
} else if (message.contains("GetSnapshotUriResponse")) {
snapshotUri = removeIPfromUrl(Helper.fetchXML(message, ":MediaUri", ":Uri"));
logger.debug("GetSnapshotUri:{}", snapshotUri);
index = url.indexOf("/", index + 2);
}
if (index == -1) {
- logger.debug("We hit an issue parsing url:{}", url);
+ logger.debug("We hit an issue parsing url: {}", url);
return "";
}
return url.substring(index);
if (startIndex != -1 && endIndex != -1) {
return url.substring(startIndex, endIndex);
}
- logger.debug("We hit an issue extracting IP:PORT from url:{}", url);
+ logger.debug("We hit an issue extracting IP:PORT from url: {}", url);
return "";
}
String temp = Helper.fetchXML(message, "<tt:Device", "tt:XAddr");
if (!temp.isEmpty()) {
deviceXAddr = temp;
- logger.debug("deviceXAddr:{}", deviceXAddr);
+ logger.debug("deviceXAddr: {}", deviceXAddr);
}
temp = Helper.fetchXML(message, "<tt:Events", "tt:XAddr");
if (!temp.isEmpty()) {
subscriptionXAddr = eventXAddr = temp;
- logger.debug("eventsXAddr:{}", eventXAddr);
+ logger.debug("eventsXAddr: {}", eventXAddr);
}
temp = Helper.fetchXML(message, "<tt:Media", "tt:XAddr");
if (!temp.isEmpty()) {
mediaXAddr = temp;
- logger.debug("mediaXAddr:{}", mediaXAddr);
+ logger.debug("mediaXAddr: {}", mediaXAddr);
}
ptzXAddr = Helper.fetchXML(message, "<tt:PTZ", "tt:XAddr");
}
ipCameraHandler.removeChannels(removeChannels);
} else {
- logger.debug("ptzXAddr:{}", ptzXAddr);
+ logger.debug("ptzXAddr: {}", ptzXAddr);
}
}
String day = Helper.fetchXML(message, "UTCDateTime", "Day>");
String month = Helper.fetchXML(message, "UTCDateTime", "Month>");
String year = Helper.fetchXML(message, "UTCDateTime", "Year>");
- logger.debug("Cameras UTC dateTime is:{}-{}-{}T{}:{}:{}", year, month, day, hour, minute, second);
+ logger.debug("Camera UTC dateTime is: {}-{}-{}T{}:{}:{}", year, month, day, hour, minute, second);
}
private String getUTCdateTime() {
}
public void sendOnvifRequest(RequestType requestType, String xAddr) {
- logger.trace("Sending ONVIF request:{}", requestType);
+ logger.trace("Sending ONVIF request: {}", requestType);
String security = "";
String extraEnvelope = "";
String headerTo = "";
bootstrap = localBootstap;
}
if (!mainEventLoopGroup.isShuttingDown()) {
- bootstrap.connect(new InetSocketAddress(ipAddress, onvifPort)).addListener(new ChannelFutureListener() {
+ localBootstap.connect(new InetSocketAddress(ipAddress, onvifPort)).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(@Nullable ChannelFuture future) {
connectError = false;
Channel ch = future.channel();
ch.writeAndFlush(request);
- } else { // an error occured
+ } else { // an error occurred
if (future.isDone() && !future.isCancelled()) {
Throwable cause = future.cause();
+ String msg = cause.getMessage();
logger.trace("connect failed - cause {}", cause.getMessage());
if (cause instanceof ConnectTimeoutException) {
logger.debug("Camera is not reachable on IP {}", ipAddress);
connectError = true;
- } else if ((cause instanceof ConnectException)
- && cause.getMessage().contains("Connection refused")) {
+ } else if ((cause instanceof ConnectException) && msg != null
+ && msg.contains("Connection refused")) {
logger.debug("Camera ONVIF port {} is refused.", onvifPort);
refusedError = true;
}
} else { // 192.168.1.1
ipAddress = url;
deviceXAddr = "http://" + ipAddress + "/onvif/device_service";
- logger.debug("No Onvif Port found when parsing:{}", url);
+ logger.debug("No ONVIF Port found when parsing: {}", url);
return;
}
deviceXAddr = "http://" + ipAddress + ":" + onvifPort + "/onvif/device_service";
}
String dataName = Helper.fetchXML(eventMessage, "tt:Data", "Name=\"");
String dataValue = Helper.fetchXML(eventMessage, "tt:Data", "Value=\"");
- logger.debug("Onvif Event Topic:{}, Data:{}, Value:{}", topic, dataName, dataValue);
+ logger.debug("ONVIF Event Topic: {}, Data: {}, Value: {}", topic, dataName, dataValue);
switch (topic) {
case "RuleEngine/CellMotionDetector/Motion":
if ("true".equals(dataValue)) {
}
break;
default:
- logger.debug("Please report this camera has an un-implemented ONVIF event. Topic:{}", topic);
+ logger.debug("Please report this camera has an un-implemented ONVIF event. Topic: {}", topic);
}
sendOnvifRequest(RequestType.Renew, subscriptionXAddr);
}
if (startLookingFromIndex >= 0) {
temp = Helper.fetchXML(message.substring(startLookingFromIndex), heading, key);
if (!temp.isEmpty()) {
- logger.trace("String was found:{}", temp);
+ logger.trace("String was found: {}", temp);
results.add(temp);
} else {
return results; // key string must not exist so stop looking.
currentPanCamValue = Float.parseFloat(result.substring(beginIndex + 3, endIndex));
currentPanPercentage = (((panRangeMin - currentPanCamValue) * -1) / ((panRangeMin - panRangeMax) * -1))
* 100;
- logger.debug("Pan is updating to:{} and the cam value is {}", Math.round(currentPanPercentage),
+ logger.debug("Pan is updating to: {} and the cam value is {}", Math.round(currentPanPercentage),
currentPanCamValue);
} else {
logger.warn(
currentTiltCamValue = Float.parseFloat(result.substring(beginIndex + 3, endIndex));
currentTiltPercentage = (((tiltRangeMin - currentTiltCamValue) * -1) / ((tiltRangeMin - tiltRangeMax) * -1))
* 100;
- logger.debug("Tilt is updating to:{} and the cam value is {}", Math.round(currentTiltPercentage),
+ logger.debug("Tilt is updating to: {} and the cam value is {}", Math.round(currentTiltPercentage),
currentTiltCamValue);
} else {
return;
if (beginIndex >= 0 && endIndex >= 0) {
currentZoomCamValue = Float.parseFloat(result.substring(beginIndex + 3, endIndex));
currentZoomPercentage = (((zoomMin - currentZoomCamValue) * -1) / ((zoomMin - zoomMax) * -1)) * 100;
- logger.debug("Zoom is updating to:{} and the cam value is {}", Math.round(currentZoomPercentage),
+ logger.debug("Zoom is updating to: {} and the cam value is {}", Math.round(currentZoomPercentage),
currentZoomCamValue);
} else {
return;
} catch (InterruptedException e) {
logger.warn("ONVIF was not cleanly shutdown, due to being interrupted");
} finally {
- logger.debug("Eventloop is shutdown:{}", mainEventLoopGroup.isShutdown());
+ logger.debug("Eventloop is shutdown: {}", mainEventLoopGroup.isShutdown());
bootstrap = null;
threadPool.shutdown();
}
*
* @author Matthew Skinner - Initial contribution
*/
-
@NonNullByDefault
@io.netty.channel.ChannelHandler.Sharable
public class OnvifDiscovery {
String temp = url;
BigDecimal onvifPort = new BigDecimal(80);
- logger.info("Camera found at xAddr:{}", url);
+ logger.info("Camera found at xAddr: {}", url);
int endIndex = temp.indexOf(" "); // Some xAddr have two urls with a space in between.
if (endIndex > 0) {
temp = temp.substring(0, endIndex); // Use only the first url from now on.
void processCameraReplys() {
for (DatagramPacket packet : listOfReplys) {
String xml = packet.content().toString(CharsetUtil.UTF_8);
- logger.trace("Device replied to discovery with:{}", xml);
+ logger.trace("Device replied to discovery with: {}", xml);
String xAddr = Helper.fetchXML(xml, "", "d:XAddrs>"); // Foscam <wsdd:XAddrs> and all other brands <d:XAddrs>
if (!xAddr.isEmpty()) {
searchReply(xAddr, xml);
} catch (IOException e) {
brand = "onvif";
}
- logger.info("Possible {} camera found at:{}", brand, packet.sender().getHostString());
+ logger.debug("Possible {} camera found at: {}", brand, packet.sender().getHostString());
if ("reolink".equals(brand)) {
ipCameraDiscoveryService.newCameraFound(brand, packet.sender().getHostString(), 8000);
} else {
response += temp;
}
reply.close();
- logger.trace("Cameras Login page is:{}", response);
+ logger.trace("Cameras Login page is: {}", response);
brand = checkForBrand(response);
} catch (MalformedURLException e) {
} finally {
/**
* The {@link NettyRtspHandler} is used to decode RTSP traffic into message Strings.
*
- *
* @author Matthew Skinner - Initial contribution
*/
@NonNullByDefault
* The {@link RtspConnection} is a WIP and not currently used, but will talk directly to RTSP and collect information
* about the camera and streams.
*
- *
* @author Matthew Skinner - Initial contribution
*/
@NonNullByDefault
Ffmpeg localFfmpeg = handler.ffmpegHLS;
if (localFfmpeg == null) {
handler.setupFfmpegFormat(FFmpegFormat.HLS);
- } else if (!localFfmpeg.getIsAlive()) {
+ } else if (!localFfmpeg.isAlive()) {
localFfmpeg.startConverting();
} else {
localFfmpeg.setKeepAlive(8);
httpService.registerServlet("/ipcamera/" + handler.getThing().getUID().getId(), this, initParameters,
httpService.createDefaultHttpContext());
} catch (Exception e) {
- logger.warn("Registering servlet failed:{}", e.getMessage());
+ logger.warn("Registering servlet failed: {}", e.getMessage());
}
}
* The {@link OpenStreams} Keeps track of all open mjpeg streams so the byte[] can be given to all FIFO buffers to allow
* 1 to many streams without needing to open more than 1 source stream.
*
- *
* @author Matthew Skinner - Initial contribution
*/
-
@NonNullByDefault
public class OpenStreams {
private List<StreamOutput> openStreams = Collections.synchronizedList(new ArrayList<StreamOutput>());
*
* @author Matthew Skinner - Initial contribution
*/
-
@NonNullByDefault
public class StreamOutput {
public final Logger logger = LoggerFactory.getLogger(getClass());
try {
fifo.add(frame);
} catch (IllegalStateException e) {
- logger.debug("FIFO buffer has run out of space:{}", e.getMessage());
+ logger.debug("FIFO buffer has run out of space: {}", e.getMessage());
fifo.remove();
fifo.add(frame);
}