2 * Copyright (c) 2010-2024 Contributors to the openHAB project
4 * See the NOTICE file(s) distributed with this work for additional
7 * This program and the accompanying materials are made available under the
8 * terms of the Eclipse Public License 2.0 which is available at
9 * http://www.eclipse.org/legal/epl-2.0
11 * SPDX-License-Identifier: EPL-2.0
13 package org.openhab.persistence.influxdb;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.*;

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.common.ThreadPoolManager;
import org.openhab.core.config.core.ConfigurableService;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemFactory;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.items.ItemUtil;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.persistence.ModifiablePersistenceService;
import org.openhab.core.persistence.PersistenceItemInfo;
import org.openhab.core.persistence.PersistenceService;
import org.openhab.core.persistence.QueryablePersistenceService;
import org.openhab.core.persistence.strategy.PersistenceStrategy;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
import org.openhab.persistence.influxdb.internal.FilterCriteriaQueryCreator;
import org.openhab.persistence.influxdb.internal.InfluxDBConfiguration;
import org.openhab.persistence.influxdb.internal.InfluxDBHistoricItem;
import org.openhab.persistence.influxdb.internal.InfluxDBMetadataService;
import org.openhab.persistence.influxdb.internal.InfluxDBPersistentItemInfo;
import org.openhab.persistence.influxdb.internal.InfluxDBRepository;
import org.openhab.persistence.influxdb.internal.InfluxDBStateConvertUtils;
import org.openhab.persistence.influxdb.internal.InfluxPoint;
import org.openhab.persistence.influxdb.internal.influx1.InfluxDB1RepositoryImpl;
import org.openhab.persistence.influxdb.internal.influx2.InfluxDB2RepositoryImpl;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
73 * This is the implementation of the InfluxDB {@link PersistenceService}. It
74 * persists item values using the <a href="http://influxdb.org">InfluxDB</a> time
75 * series database. The states ({@link State}) of an {@link Item} are persisted
76 * by default in a time series with names equal to the name of the item.
 * This add-on supports both InfluxDB 1.X and 2.X. Because the two versions are
 * incompatible and use different drivers, the version-specific code is accessed
 * through the {@link InfluxDBRepository} and {@link FilterCriteriaQueryCreator}
 * interfaces, with the concrete implementations residing in the
 * {@link org.openhab.persistence.influxdb.internal.influx1} and
 * {@link org.openhab.persistence.influxdb.internal.influx2} packages.
85 * @author Theo Weiss - Initial contribution, rewrite of
86 * org.openhab.persistence.influxdb
87 * @author Joan Pujol Espinar - Addon rewrite refactoring code and adding
88 * support for InfluxDB 2.0. Some tag code is based from not integrated
89 * branch from Dominik Vorreiter
92 @Component(service = { PersistenceService.class,
93 QueryablePersistenceService.class }, configurationPid = "org.openhab.influxdb", //
94 property = Constants.SERVICE_PID + "=org.openhab.influxdb")
95 @ConfigurableService(category = "persistence", label = "InfluxDB Persistence Service", description_uri = InfluxDBPersistenceService.CONFIG_URI)
96 public class InfluxDBPersistenceService implements ModifiablePersistenceService {
97 public static final String SERVICE_NAME = "influxdb";
99 private final Logger logger = LoggerFactory.getLogger(InfluxDBPersistenceService.class);
101 private static final int COMMIT_INTERVAL = 3; // in s
102 protected static final String CONFIG_URI = "persistence:influxdb";
104 // External dependencies
105 private final ItemRegistry itemRegistry;
106 private final InfluxDBMetadataService influxDBMetadataService;
108 private final InfluxDBConfiguration configuration;
109 private final InfluxDBRepository influxDBRepository;
110 private boolean serviceActivated;
113 private final ScheduledFuture<?> storeJob;
114 private final BlockingQueue<InfluxPoint> pointsQueue = new LinkedBlockingQueue<>();
117 private final Set<ItemFactory> itemFactories = new HashSet<>();
118 private Map<String, Class<? extends State>> desiredClasses = new HashMap<>();
121 public InfluxDBPersistenceService(final @Reference ItemRegistry itemRegistry,
122 final @Reference InfluxDBMetadataService influxDBMetadataService, Map<String, Object> config) {
123 this.itemRegistry = itemRegistry;
124 this.influxDBMetadataService = influxDBMetadataService;
125 this.configuration = new InfluxDBConfiguration(config);
126 if (configuration.isValid()) {
127 this.influxDBRepository = createInfluxDBRepository();
128 this.influxDBRepository.connect();
129 this.storeJob = ThreadPoolManager.getScheduledPool("org.openhab.influxdb")
130 .scheduleWithFixedDelay(this::commit, COMMIT_INTERVAL, COMMIT_INTERVAL, TimeUnit.SECONDS);
131 serviceActivated = true;
133 throw new IllegalArgumentException("Configuration invalid.");
136 logger.info("InfluxDB persistence service started.");
139 // Visible for testing
140 protected InfluxDBRepository createInfluxDBRepository() throws IllegalArgumentException {
141 return switch (configuration.getVersion()) {
142 case V1 -> new InfluxDB1RepositoryImpl(configuration, influxDBMetadataService);
143 case V2 -> new InfluxDB2RepositoryImpl(configuration, influxDBMetadataService);
144 default -> throw new IllegalArgumentException("Failed to instantiate repository.");
149 * Disconnect from database when service is deactivated
152 public void deactivate() {
153 serviceActivated = false;
155 storeJob.cancel(false);
156 commit(); // ensure we at least tried to store the data;
158 if (!pointsQueue.isEmpty()) {
159 logger.warn("InfluxDB failed to finally store {} points.", pointsQueue.size());
162 influxDBRepository.disconnect();
163 logger.info("InfluxDB persistence service stopped.");
167 public String getId() {
172 public String getLabel(@Nullable Locale locale) {
173 return "InfluxDB persistence layer";
177 public Set<PersistenceItemInfo> getItemInfo() {
178 if (checkConnection()) {
179 return influxDBRepository.getStoredItemsCount().entrySet().stream().map(InfluxDBPersistentItemInfo::new)
180 .collect(Collectors.toUnmodifiableSet());
182 logger.info("getItemInfo ignored, InfluxDB is not connected");
188 public void store(Item item) {
193 public void store(Item item, @Nullable String alias) {
194 store(item, ZonedDateTime.now(), item.getState(), alias);
198 public void store(Item item, ZonedDateTime date, State state) {
199 store(item, date, state, null);
203 public void store(Item item, ZonedDateTime date, State state, @Nullable String alias) {
204 if (!serviceActivated) {
205 logger.warn("InfluxDB service not ready. Storing {} rejected.", item);
208 convert(item, state, date.toInstant(), null).thenAccept(point -> {
210 logger.trace("Ignoring item {}, conversion to an InfluxDB point failed.", item.getName());
213 if (pointsQueue.offer(point)) {
214 logger.trace("Queued {} for item {}", point, item);
216 logger.warn("Failed to queue {} for item {}", point, item);
222 public boolean remove(FilterCriteria filter) throws IllegalArgumentException {
223 if (serviceActivated && checkConnection()) {
224 if (filter.getItemName() == null) {
225 logger.warn("Item name is missing in filter {} when trying to remove data.", filter);
228 return influxDBRepository.remove(filter);
230 logger.debug("Remove query {} ignored, InfluxDB is not connected.", filter);
236 public Iterable<HistoricItem> query(FilterCriteria filter) {
237 if (serviceActivated && checkConnection()) {
239 "Query-Filter: itemname: {}, ordering: {}, state: {}, operator: {}, getBeginDate: {}, getEndDate: {}, getPageSize: {}, getPageNumber: {}",
240 filter.getItemName(), filter.getOrdering().toString(), filter.getState(), filter.getOperator(),
241 filter.getBeginDate(), filter.getEndDate(), filter.getPageSize(), filter.getPageNumber());
242 if (filter.getItemName() == null) {
243 logger.warn("Item name is missing in filter {} when querying data.", filter);
247 List<InfluxDBRepository.InfluxRow> results = influxDBRepository.query(filter,
248 configuration.getRetentionPolicy());
249 return results.stream().map(this::mapRowToHistoricItem).collect(Collectors.toList());
251 logger.debug("Query for persisted data ignored, InfluxDB is not connected");
256 private HistoricItem mapRowToHistoricItem(InfluxDBRepository.InfluxRow row) {
257 State state = InfluxDBStateConvertUtils.objectToState(row.value(), row.itemName(), itemRegistry);
258 return new InfluxDBHistoricItem(row.itemName(), state,
259 ZonedDateTime.ofInstant(row.time(), ZoneId.systemDefault()));
263 public List<PersistenceStrategy> getDefaultStrategies() {
264 return List.of(PersistenceStrategy.Globals.RESTORE, PersistenceStrategy.Globals.CHANGE);
268 * check connection and try reconnect
270 * @return true if connected
272 private boolean checkConnection() {
273 if (influxDBRepository.isConnected()) {
275 } else if (serviceActivated) {
276 logger.debug("Connection lost, trying re-connection");
277 return influxDBRepository.connect();
282 private void commit() {
283 if (!pointsQueue.isEmpty() && checkConnection()) {
284 List<InfluxPoint> points = new ArrayList<>();
285 pointsQueue.drainTo(points);
286 if (!influxDBRepository.write(points)) {
287 logger.warn("Re-queuing {} elements, failed to write batch.", points.size());
288 pointsQueue.addAll(points);
289 influxDBRepository.disconnect();
291 logger.trace("Wrote {} elements to database", points.size());
297 * Convert incoming data to an {@link InfluxPoint} for further processing. This is needed because storage is
298 * asynchronous and the item data may have changed.
300 * The method is package-private for testing.
302 * @param item the {@link Item} that needs conversion
303 * @param storeAlias an (optional) alias for the item
304 * @return a {@link CompletableFuture} that contains either <code>null</code> for item states that cannot be
305 * converted or the corresponding {@link InfluxPoint}
307 CompletableFuture<@Nullable InfluxPoint> convert(Item item, State state, Instant timeStamp,
308 @Nullable String storeAlias) {
309 String itemName = item.getName();
310 String itemLabel = item.getLabel();
311 String category = item.getCategory();
312 String itemType = item.getType();
314 if (state instanceof UnDefType) {
315 return CompletableFuture.completedFuture(null);
318 return CompletableFuture.supplyAsync(() -> {
319 String measurementName = storeAlias != null && !storeAlias.isBlank() ? storeAlias : itemName;
320 measurementName = influxDBMetadataService.getMeasurementNameOrDefault(itemName, measurementName);
322 if (configuration.isReplaceUnderscore()) {
323 measurementName = measurementName.replace('_', '.');
326 State storeState = Objects
327 .requireNonNullElse(state.as(desiredClasses.get(ItemUtil.getMainItemType(itemType))), state);
328 Object value = InfluxDBStateConvertUtils.stateToObject(storeState);
330 InfluxPoint.Builder pointBuilder = InfluxPoint.newBuilder(measurementName).withTime(timeStamp)
331 .withValue(value).withTag(TAG_ITEM_NAME, itemName);
333 if (configuration.isAddCategoryTag()) {
334 String categoryName = Objects.requireNonNullElse(category, "n/a");
335 pointBuilder.withTag(TAG_CATEGORY_NAME, categoryName);
338 if (configuration.isAddTypeTag()) {
339 pointBuilder.withTag(TAG_TYPE_NAME, itemType);
342 if (configuration.isAddLabelTag()) {
343 String labelName = Objects.requireNonNullElse(itemLabel, "n/a");
344 pointBuilder.withTag(TAG_LABEL_NAME, labelName);
347 influxDBMetadataService.getMetaData(itemName)
348 .ifPresent(metadata -> metadata.getConfiguration().forEach(pointBuilder::withTag));
350 return pointBuilder.build();
354 @Reference(cardinality = ReferenceCardinality.AT_LEAST_ONE, policy = ReferencePolicy.DYNAMIC)
355 public void setItemFactory(ItemFactory itemFactory) {
356 itemFactories.add(itemFactory);
357 calculateItemTypeClasses();
360 public void unsetItemFactory(ItemFactory itemFactory) {
361 itemFactories.remove(itemFactory);
362 calculateItemTypeClasses();
365 private synchronized void calculateItemTypeClasses() {
366 Map<String, Class<? extends State>> desiredClasses = new HashMap<>();
367 itemFactories.forEach(factory -> {
368 for (String itemType : factory.getSupportedItemTypes()) {
369 Item item = factory.createItem(itemType, "influxItem");
371 item.getAcceptedCommandTypes().stream()
372 .filter(commandType -> commandType.isAssignableFrom(State.class)).findFirst()
373 .map(commandType -> (Class<? extends State>) commandType.asSubclass(State.class))
374 .ifPresent(desiredClass -> desiredClasses.put(itemType, desiredClass));
378 this.desiredClasses = desiredClasses;