Codebase as of c53e4aed26 as an initial commit for the shrunk repo

Signed-off-by: Kai Kreuzer <kai@openhab.org>
This commit is contained in:
Kai Kreuzer
2010-02-20 19:23:32 +01:00
committed by Kai Kreuzer
commit bbf1a7fd29
302 changed files with 29726 additions and 0 deletions

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse JDT build path generated for an m2e (Maven) project; entries mirror the
     standard Maven directory layout and are flagged maven.pomderived so m2e manages them. -->
<classpath>
<!-- Main Java sources, compiled to target/classes (Maven's default output). -->
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- Main resources: excluded from Eclipse compilation (excluding="**"); Maven copies them. -->
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- Test sources, compiled to target/test-classes and marked as test-only. -->
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="test" value="true"/>
</attributes>
</classpathentry>
<!-- JRE container pinned to the JavaSE-11 execution environment. -->
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- Maven dependencies resolved by m2e from the POM. -->
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse project descriptor: registers the Java and Maven (m2e) builders and natures. -->
<projectDescription>
<name>org.openhab.persistence.influxdb</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<!-- Compile Java sources with the JDT builder. -->
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<!-- Keep the Eclipse project configuration in sync with the POM via m2e. -->
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

View File

@@ -0,0 +1,13 @@
This content is produced and maintained by the openHAB project.
* Project home: https://www.openhab.org
== Declared Project Licenses
This program and the accompanying materials are made available under the terms
of the Eclipse Public License 2.0 which is available at
https://www.eclipse.org/legal/epl-2.0/.
== Source Code
https://github.com/openhab/openhab-addons

View File

@@ -0,0 +1,48 @@
# InfluxDB (0.9 and newer) Persistence
This service allows you to persist and query states using the [InfluxDB](https://www.influxdata.com/products/influxdb-overview/) and [InfluxDB 2.0](https://v2.docs.influxdata.com/v2.0/) time series database. The persisted values can be queried from within openHAB. There also are nice tools on the web for visualizing InfluxDB time series, such as [Grafana](http://grafana.org/).
## Database Structure
- This service allows you to persist and query states using the time series database.
- The states of an item are persisted in *measurements* points with names equal to the name of the item, or the alias, if one is provided. In both variants, a *tag* named "item" is added, containing the item name.
All values are stored in a *field* called "value" using integers or doubles if possible,`OnOffType` and `OpenClosedType` values are stored using 0 or 1.
- If configured, extra tags for the item category, label or type can be added for each point.
Some example entries for an item with the name "speedtest" without any further configuration would look like this:
> Query shown using InfluxDB 2.0 syntax; the syntax for 1.0 is different
> from(bucket: "default")
|> range(start: -30d)
|> filter(fn: (r) => r._measurement == "speedtest")
name: speedtest
_time _item _value
----- ----- ------
1558302027124000000 speedtest 123289369.0
1558332852716000000 speedtest 80423789.0
## Prerequisites
First of all you have to setup and run an InfluxDB 1.X or 2.X server.
This is very easy and you will find good documentation on it on the
[InfluxDB web site for 2.X version](https://v2.docs.influxdata.com/v2.0/get-started/) and [InfluxDB web site for 1.X version](https://docs.influxdata.com/influxdb/v1.7/).
## Configuration
| Property | Default | Required | Description |
|------------------------------------|-------------------------|----------|------------------------------------------|
| version | V1 | No | InfluxDB database version V1 for 1.X and V2 for 2.x|
| url | http://127.0.0.1:8086 | No | database URL |
| user | openhab | No | name of the database user, e.g. `openhab`|
| password | | No(*) | password of the database user you choose |
| token | | No(*) | token to authenticate the database (only for V2) [Instructions about how to create one](https://v2.docs.influxdata.com/v2.0/security/tokens/create-token/) |
| db | openhab | No | name of the database for V1 and name of the organization for V2 |
| retentionPolicy | autogen | No | name of the retention policy for V1 and name of the bucket for V2 |
(*) For the 1.X version you must provide user and password; for 2.X you can use either user and password or a token. That means
that if you use all default values at minimum you must provide a password or a token.
All item- and event-related configuration is defined in the file `persistence/influxdb.persist`.

View File

@@ -0,0 +1,138 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the openHAB InfluxDB persistence bundle.
     Child-element order inside each <dependency> is normalized to groupId/artifactId/version. -->
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>org.openhab.addons.bundles</groupId>
    <artifactId>org.openhab.addons.reactor.bundles</artifactId>
    <version>3.0.0-SNAPSHOT</version>
  </parent>

  <artifactId>org.openhab.persistence.influxdb</artifactId>

  <name>openHAB Add-ons :: Bundles :: Persistence Service :: InfluxDB</name>

  <properties>
    <!-- Packages referenced (mostly optionally) by the InfluxDB client stack that must
         not become OSGi Import-Package requirements of this bundle. -->
    <bnd.importpackage>
!javax.annotation;!android.*,!com.android.*,!com.google.appengine.*,!dalvik.system,!kotlin.*,!kotlinx.*,!org.conscrypt,!sun.security.ssl,!org.apache.harmony.*,!org.apache.http.*,!rx.*,!org.msgpack.*
    </bnd.importpackage>
  </properties>

  <dependencies>
    <!-- START InfluxDB 2.0 -->
    <!-- START influxdb-client-java -->
    <dependency>
      <groupId>com.influxdb</groupId>
      <artifactId>influxdb-client-java</artifactId>
      <version>1.6.0</version>
    </dependency>
    <dependency>
      <groupId>com.influxdb</groupId>
      <artifactId>influxdb-client-core</artifactId>
      <version>1.6.0</version>
    </dependency>
    <!-- NOTE(review): the retrofit2 converters below (2.5.0) lag behind retrofit itself (2.6.2);
         versions are kept as-is here, but aligning them is worth confirming. -->
    <dependency>
      <groupId>com.squareup.retrofit2</groupId>
      <artifactId>converter-gson</artifactId>
      <version>2.5.0</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.retrofit2</groupId>
      <artifactId>converter-scalars</artifactId>
      <version>2.5.0</version>
    </dependency>
    <dependency>
      <groupId>com.google.code.gson</groupId>
      <artifactId>gson</artifactId>
      <version>2.8.5</version>
    </dependency>
    <dependency>
      <groupId>io.gsonfire</groupId>
      <artifactId>gson-fire</artifactId>
      <version>1.8.0</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.okio</groupId>
      <artifactId>okio</artifactId>
      <version>1.17.3</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-csv</artifactId>
      <version>1.6</version>
    </dependency>
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
      <version>20180813</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.okhttp3</groupId>
      <artifactId>okhttp</artifactId>
      <version>3.14.4</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.retrofit2</groupId>
      <artifactId>retrofit</artifactId>
      <version>2.6.2</version>
    </dependency>
    <dependency>
      <groupId>com.google.code.findbugs</groupId>
      <artifactId>jsr305</artifactId>
      <version>3.0.2</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.okhttp3</groupId>
      <artifactId>logging-interceptor</artifactId>
      <version>3.14.4</version>
    </dependency>
    <dependency>
      <groupId>io.reactivex.rxjava2</groupId>
      <artifactId>rxjava</artifactId>
      <version>2.2.17</version>
    </dependency>
    <dependency>
      <groupId>org.reactivestreams</groupId>
      <artifactId>reactive-streams</artifactId>
      <version>1.0.3</version>
    </dependency>
    <dependency>
      <groupId>io.swagger</groupId>
      <artifactId>swagger-annotations</artifactId>
      <version>1.5.22</version>
    </dependency>
    <!-- END influxdb-client-java -->
    <dependency>
      <groupId>com.influxdb</groupId>
      <artifactId>flux-dsl</artifactId>
      <version>1.6.0</version>
    </dependency>
    <!-- END InfluxDB 2.0 -->
    <!-- START InfluxDB 1.0 -->
    <dependency>
      <groupId>org.influxdb</groupId>
      <artifactId>influxdb-java</artifactId>
      <version>2.17</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.retrofit2</groupId>
      <artifactId>converter-moshi</artifactId>
      <version>2.6.2</version>
    </dependency>
    <dependency>
      <groupId>com.squareup.moshi</groupId>
      <artifactId>moshi</artifactId>
      <version>1.8.0</version>
    </dependency>
    <!-- Okhttp & Retrofit from 2.0 are ok -->
    <!-- END InfluxDB 1.0 -->
  </dependencies>
</project>

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Karaf feature: installs the InfluxDB persistence bundle on top of the openHAB runtime base
     together with its default configuration file. -->
<features name="org.openhab.persistence.influxdb-${project.version}" xmlns="http://karaf.apache.org/xmlns/features/v1.4.0">
<repository>
mvn:org.openhab.core.features.karaf/org.openhab.core.features.karaf.openhab-core/${ohc.version}/xml/features
</repository>
<feature name="openhab-persistence-influxdb" description="InfluxDB Persistence" version="${project.version}">
<feature>openhab-runtime-base</feature>
<!-- start-level 80: add-on bundles start after the core runtime is up. -->
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.influxdb/${project.version}
</bundle>
<configfile finalname="${openhab.conf}/services/influxdb.cfg" override="false">
mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/influxdb
</configfile>
</feature>
</features>

View File

@@ -0,0 +1,246 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.config.core.ConfigurableService;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.items.MetadataRegistry;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.persistence.PersistenceItemInfo;
import org.openhab.core.persistence.PersistenceService;
import org.openhab.core.persistence.QueryablePersistenceService;
import org.openhab.core.persistence.strategy.PersistenceStrategy;
import org.openhab.core.types.State;
import org.openhab.persistence.influxdb.internal.FilterCriteriaQueryCreator;
import org.openhab.persistence.influxdb.internal.InfluxDBConfiguration;
import org.openhab.persistence.influxdb.internal.InfluxDBHistoricItem;
import org.openhab.persistence.influxdb.internal.InfluxDBPersistentItemInfo;
import org.openhab.persistence.influxdb.internal.InfluxDBRepository;
import org.openhab.persistence.influxdb.internal.InfluxDBStateConvertUtils;
import org.openhab.persistence.influxdb.internal.InfluxPoint;
import org.openhab.persistence.influxdb.internal.InfluxRow;
import org.openhab.persistence.influxdb.internal.ItemToStorePointCreator;
import org.openhab.persistence.influxdb.internal.RepositoryFactory;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Modified;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This is the implementation of the InfluxDB {@link PersistenceService}. It persists item values
 * using the <a href="http://influxdb.org">InfluxDB</a> time series database. The states
 * ({@link State}) of an {@link Item} are persisted by default in a time series with names equal to the name of
 * the item.
 *
 * This addon supports 1.X and 2.X versions; as the two versions are incompatible and use different drivers, the
 * version-specific code is accessed through the {@link InfluxDBRepository} and {@link FilterCriteriaQueryCreator}
 * interfaces, and the specific implementations reside in the
 * {@link org.openhab.persistence.influxdb.internal.influx1} and
 * {@link org.openhab.persistence.influxdb.internal.influx2} packages.
 *
 * @author Theo Weiss - Initial contribution, rewrite of org.openhab.persistence.influxdb
 * @author Joan Pujol Espinar - Addon rewrite refactoring code and adding support for InfluxDB 2.0. Some tag code is
 *         based from not integrated branch from Dominik Vorreiter
 */
@NonNullByDefault
@Component(service = { PersistenceService.class,
        QueryablePersistenceService.class }, configurationPid = "org.openhab.influxdb", //
        property = Constants.SERVICE_PID + "=org.openhab.influxdb")
@ConfigurableService(category = "persistence", label = "InfluxDB Persistence Service", description_uri = InfluxDBPersistenceService.CONFIG_URI)
public class InfluxDBPersistenceService implements QueryablePersistenceService {

    public static final String SERVICE_NAME = "influxdb";
    protected static final String CONFIG_URI = "persistence:influxdb";

    private final Logger logger = LoggerFactory.getLogger(InfluxDBPersistenceService.class);

    // External dependencies (constructor-injected by DS)
    private final ItemRegistry itemRegistry;
    private final MetadataRegistry metadataRegistry;

    // Internal dependencies/state
    private InfluxDBConfiguration configuration = InfluxDBConfiguration.NO_CONFIGURATION;
    // Relax null rules because these can only be null if the component is not active
    private @NonNullByDefault({}) ItemToStorePointCreator itemToStorePointCreator;
    private @NonNullByDefault({}) InfluxDBRepository influxDBRepository;

    @Activate
    public InfluxDBPersistenceService(final @Reference ItemRegistry itemRegistry,
            final @Reference MetadataRegistry metadataRegistry) {
        this.itemRegistry = itemRegistry;
        this.metadataRegistry = metadataRegistry;
    }

    /**
     * Connect to database when service is activated
     *
     * @param config service configuration properties; ignored (service stays inactive) when invalid
     */
    @Activate
    public void activate(final @Nullable Map<String, @Nullable Object> config) {
        logger.debug("InfluxDB persistence service is being activated");
        if (loadConfiguration(config)) {
            itemToStorePointCreator = new ItemToStorePointCreator(configuration, metadataRegistry);
            influxDBRepository = createInfluxDBRepository();
            influxDBRepository.connect();
        } else {
            logger.error("Cannot load configuration, persistence service wont work");
        }
        logger.debug("InfluxDB persistence service is now activated");
    }

    // Visible for testing
    protected InfluxDBRepository createInfluxDBRepository() {
        return RepositoryFactory.createRepository(configuration);
    }

    /**
     * Disconnect from database when service is deactivated
     */
    @Deactivate
    public void deactivate() {
        logger.debug("InfluxDB persistence service deactivated");
        if (influxDBRepository != null) {
            influxDBRepository.disconnect();
            influxDBRepository = null;
        }
        // Drop the point creator so no stale configuration can be used after deactivation
        itemToStorePointCreator = null;
    }

    /**
     * Rerun deactivation/activation code each time configuration is changed
     *
     * @param config the new configuration, ignored when null
     */
    @Modified
    protected void modified(@Nullable Map<String, @Nullable Object> config) {
        if (config != null) {
            logger.debug("Config has been modified will deactivate/activate with new config");
            deactivate();
            activate(config);
        } else {
            logger.warn("Null configuration, ignoring");
        }
    }

    /**
     * Parses and validates the given configuration map, storing the result in {@link #configuration}.
     *
     * @param config configuration properties, may be null
     * @return true when a valid configuration was loaded
     */
    private boolean loadConfiguration(@Nullable Map<String, @Nullable Object> config) {
        boolean configurationIsValid;
        if (config != null) {
            configuration = new InfluxDBConfiguration(config);
            configurationIsValid = configuration.isValid();
            if (configurationIsValid) {
                logger.debug("Loaded configuration {}", config);
            } else {
                logger.warn("Some configuration properties are not valid {}", config);
            }
        } else {
            configuration = InfluxDBConfiguration.NO_CONFIGURATION;
            configurationIsValid = false;
            logger.warn("Ignoring configuration because it's null");
        }
        return configurationIsValid;
    }

    /**
     * @return true when the repository has been created (service activated) and reports a live connection
     */
    private boolean isRepositoryConnected() {
        return influxDBRepository != null && influxDBRepository.isConnected();
    }

    @Override
    public String getId() {
        return SERVICE_NAME;
    }

    @Override
    public String getLabel(@Nullable Locale locale) {
        return "InfluxDB persistence layer";
    }

    @Override
    public Set<PersistenceItemInfo> getItemInfo() {
        if (isRepositoryConnected()) {
            return influxDBRepository.getStoredItemsCount().entrySet().stream()
                    .map(entry -> new InfluxDBPersistentItemInfo(entry.getKey(), entry.getValue()))
                    .collect(Collectors.toUnmodifiableSet());
        } else {
            logger.info("getItemInfo ignored, InfluxDB is not yet connected");
            return Collections.emptySet();
        }
    }

    @Override
    public void store(Item item) {
        store(item, item.getName());
    }

    @Override
    public void store(Item item, @Nullable String alias) {
        if (isRepositoryConnected()) {
            InfluxPoint point = itemToStorePointCreator.convert(item, alias);
            if (point != null) {
                logger.trace("Storing item {} in InfluxDB point {}", item, point);
                influxDBRepository.write(point);
            } else {
                logger.trace("Ignoring item {} as is cannot be converted to a InfluxDB point", item);
            }
        } else {
            logger.debug("store ignored, InfluxDB is not yet connected");
        }
    }

    @Override
    public Iterable<HistoricItem> query(FilterCriteria filter) {
        logger.debug("Got a query for historic points!");
        if (isRepositoryConnected()) {
            logger.trace(
                    "Filter: itemname: {}, ordering: {}, state: {}, operator: {}, getBeginDate: {}, getEndDate: {}, getPageSize: {}, getPageNumber: {}",
                    filter.getItemName(), filter.getOrdering().toString(), filter.getState(), filter.getOperator(),
                    filter.getBeginDate(), filter.getEndDate(), filter.getPageSize(), filter.getPageNumber());
            String query = RepositoryFactory.createQueryCreator(configuration).createQuery(filter,
                    configuration.getRetentionPolicy());
            logger.trace("Query {}", query);
            List<InfluxRow> results = influxDBRepository.query(query);
            return results.stream().map(this::mapRow2HistoricItem).collect(Collectors.toList());
        } else {
            logger.debug("query ignored, InfluxDB is not yet connected");
            return Collections.emptyList();
        }
    }

    /**
     * Converts a raw InfluxDB result row into a {@link HistoricItem}, resolving the state type from the item
     * registry and rendering the timestamp in the system default time zone.
     */
    private HistoricItem mapRow2HistoricItem(InfluxRow row) {
        State state = InfluxDBStateConvertUtils.objectToState(row.getValue(), row.getItemName(), itemRegistry);
        return new InfluxDBHistoricItem(row.getItemName(), state,
                ZonedDateTime.ofInstant(row.getTime(), ZoneId.systemDefault()));
    }

    @Override
    public List<PersistenceStrategy> getDefaultStrategies() {
        return List.of(PersistenceStrategy.Globals.RESTORE, PersistenceStrategy.Globals.CHANGE);
    }
}

View File

@@ -0,0 +1,52 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.persistence.FilterCriteria;
/**
 * Creates an InfluxDB query sentence for a given openHAB persistence {@link FilterCriteria}.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public interface FilterCriteriaQueryCreator {

    /**
     * Create a query from the given {@link FilterCriteria}.
     *
     * @param criteria criteria to create the query from
     * @param retentionPolicy name of the retentionPolicy/bucket to use in the query
     * @return the created query as a String
     */
    String createQuery(FilterCriteria criteria, String retentionPolicy);

    /**
     * Maps a filter operator to the comparison symbol used by the query language of the given InfluxDB version.
     *
     * @param operator the filter operator to map
     * @param version the target InfluxDB version (only NEQ differs between versions)
     * @return the comparison operator symbol as a String
     */
    default String getOperationSymbol(FilterCriteria.Operator operator, InfluxDBVersion version) {
        if (operator == FilterCriteria.Operator.EQ) {
            return "=";
        }
        if (operator == FilterCriteria.Operator.LT) {
            return "<";
        }
        if (operator == FilterCriteria.Operator.LTE) {
            return "<=";
        }
        if (operator == FilterCriteria.Operator.GT) {
            return ">";
        }
        if (operator == FilterCriteria.Operator.GTE) {
            return ">=";
        }
        if (operator == FilterCriteria.Operator.NEQ) {
            // InfluxQL (V1) spells "not equal" differently than Flux (V2)
            return version == InfluxDBVersion.V1 ? "<>" : "!=";
        }
        throw new UnnexpectedConditionException("Not expected operator " + operator);
    }
}

View File

@@ -0,0 +1,190 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.util.Collections;
import java.util.Map;
import java.util.StringJoiner;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Contains this addon configurable parameters
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class InfluxDBConfiguration {
    // Configuration key names as they appear in the service configuration map
    public static final String URL_PARAM = "url";
    public static final String TOKEN_PARAM = "token";
    public static final String USER_PARAM = "user";
    public static final String PASSWORD_PARAM = "password";
    public static final String DATABASE_PARAM = "db";
    public static final String RETENTION_POLICY_PARAM = "retentionPolicy";
    public static final String VERSION_PARAM = "version";
    public static final String REPLACE_UNDERSCORE_PARAM = "replaceUnderscore";
    public static final String ADD_CATEGORY_TAG_PARAM = "addCategoryTag";
    public static final String ADD_LABEL_TAG_PARAM = "addLabelTag";
    public static final String ADD_TYPE_TAG_PARAM = "addTypeTag";

    // Sentinel used while no configuration has been loaded yet (final: shared constant must not be reassigned)
    public static final InfluxDBConfiguration NO_CONFIGURATION = new InfluxDBConfiguration(Collections.emptyMap());

    private final Logger logger = LoggerFactory.getLogger(InfluxDBConfiguration.class);

    private final String url;
    private final String user;
    private final String password;
    private final String token;
    private final String databaseName;
    private final String retentionPolicy;
    private final InfluxDBVersion version;
    private final boolean replaceUnderscore;
    private final boolean addCategoryTag;
    private final boolean addTypeTag;
    private final boolean addLabelTag;

    /**
     * Builds a configuration from the given map, falling back to documented defaults
     * (localhost URL, user "openhab", database "openhab", retention policy "autogen", version V1).
     */
    public InfluxDBConfiguration(Map<String, @Nullable Object> config) {
        url = (@NonNull String) config.getOrDefault(URL_PARAM, "http://127.0.0.1:8086");
        user = (@NonNull String) config.getOrDefault(USER_PARAM, "openhab");
        password = (@NonNull String) config.getOrDefault(PASSWORD_PARAM, "");
        token = (@NonNull String) config.getOrDefault(TOKEN_PARAM, "");
        databaseName = (@NonNull String) config.getOrDefault(DATABASE_PARAM, "openhab");
        retentionPolicy = (@NonNull String) config.getOrDefault(RETENTION_POLICY_PARAM, "autogen");
        version = parseInfluxVersion(config.getOrDefault(VERSION_PARAM, InfluxDBVersion.V1.name()));
        replaceUnderscore = getConfigBooleanValue(config, REPLACE_UNDERSCORE_PARAM, false);
        addCategoryTag = getConfigBooleanValue(config, ADD_CATEGORY_TAG_PARAM, false);
        addLabelTag = getConfigBooleanValue(config, ADD_LABEL_TAG_PARAM, false);
        addTypeTag = getConfigBooleanValue(config, ADD_TYPE_TAG_PARAM, false);
    }

    /**
     * Reads a boolean config value, accepting both Boolean objects and "true"/"false" strings.
     */
    private static boolean getConfigBooleanValue(Map<String, @Nullable Object> config, String key,
            boolean defaultValue) {
        Object object = config.get(key);
        if (object instanceof Boolean) {
            return (Boolean) object;
        } else if (object instanceof String) {
            return "true".equalsIgnoreCase((String) object);
        } else {
            return defaultValue;
        }
    }

    /**
     * Parses the configured version name; returns UNKNOWN (and logs a warning) on any invalid value,
     * including null or non-string values (ClassCastException/IllegalArgumentException are RuntimeExceptions).
     */
    private InfluxDBVersion parseInfluxVersion(@Nullable Object value) {
        try {
            return InfluxDBVersion.valueOf((String) value);
        } catch (RuntimeException e) {
            logger.warn("Invalid version {}", value);
            return InfluxDBVersion.UNKNOWN;
        }
    }

    /**
     * Validates the configuration: a known version, credentials appropriate for that version
     * (user+password for V1; token or user+password for V2), a database/organization name and a
     * retention policy/bucket name. Logs the reasons when invalid.
     *
     * @return true when the configuration is usable
     */
    public boolean isValid() {
        boolean hasVersion = version != InfluxDBVersion.UNKNOWN;
        boolean hasCredentials = false;
        if (version == InfluxDBVersion.V1) {
            hasCredentials = !user.isBlank() && !password.isBlank();
        } else if (version == InfluxDBVersion.V2) {
            hasCredentials = !token.isBlank() || (!user.isBlank() && !password.isBlank());
        }
        boolean hasDatabase = !databaseName.isBlank();
        boolean hasRetentionPolicy = !retentionPolicy.isBlank();

        boolean valid = hasVersion && hasCredentials && hasDatabase && hasRetentionPolicy;
        if (valid) {
            return true;
        } else {
            String msg = "InfluxDB configuration isn't valid. Addon won't work: ";
            StringJoiner reason = new StringJoiner(",");
            if (!hasVersion) {
                reason.add("Unknown version");
            } else {
                if (!hasCredentials) {
                    reason.add("No credentials");
                }
                if (!hasDatabase) {
                    reason.add("No database name / organization defined");
                }
                if (!hasRetentionPolicy) {
                    reason.add("No retention policy / bucket defined");
                }
            }
            logger.warn("{} {}", msg, reason);
            return false;
        }
    }

    public String getUrl() {
        return url;
    }

    public String getToken() {
        return token;
    }

    public String getDatabaseName() {
        return databaseName;
    }

    public String getRetentionPolicy() {
        return retentionPolicy;
    }

    public boolean isReplaceUnderscore() {
        return replaceUnderscore;
    }

    public boolean isAddCategoryTag() {
        return addCategoryTag;
    }

    public boolean isAddTypeTag() {
        return addTypeTag;
    }

    public boolean isAddLabelTag() {
        return addLabelTag;
    }

    public String getUser() {
        return user;
    }

    public String getPassword() {
        return password;
    }

    public InfluxDBVersion getVersion() {
        return version;
    }

    @Override
    public String toString() {
        // Only the lengths of password and token are exposed to avoid leaking credentials in logs
        return "InfluxDBConfiguration{" + "url='" + url + '\'' + ", user='" + user + '\'' + ", password='"
                + password.length() + " chars" + '\'' + ", token='" + token.length() + " chars" + '\''
                + ", databaseName='" + databaseName + '\'' + ", retentionPolicy='" + retentionPolicy + '\''
                + ", version=" + version + ", replaceUnderscore=" + replaceUnderscore + ", addCategoryTag="
                + addCategoryTag + ", addTypeTag=" + addTypeTag + ", addLabelTag=" + addLabelTag + '}';
    }

    public int getTokenLength() {
        return token.length();
    }

    public char[] getTokenAsCharArray() {
        return token.toCharArray();
    }
}

View File

@@ -0,0 +1,35 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
 * Constants used by this addon
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class InfluxDBConstants {
    /** Result column holding the stored value in InfluxDB 1.X (InfluxQL) query results. */
    public static final String COLUMN_VALUE_NAME_V1 = "value";
    /** Result column holding the stored value in InfluxDB 2.X (Flux) query results. */
    public static final String COLUMN_VALUE_NAME_V2 = "_value";
    /** Result column holding the timestamp in InfluxDB 1.X query results. */
    public static final String COLUMN_TIME_NAME_V1 = "time";
    /** Result column holding the timestamp in InfluxDB 2.X query results. */
    public static final String COLUMN_TIME_NAME_V2 = "_time";
    /** Field name under which item states are written. */
    public static final String FIELD_VALUE_NAME = "value";
    /** Tag carrying the item name on every written point. */
    public static final String TAG_ITEM_NAME = "item";
    /** Optional tag carrying the item category. */
    public static final String TAG_CATEGORY_NAME = "category";
    /** Optional tag carrying the item type. */
    public static final String TAG_TYPE_NAME = "type";
    /** Optional tag carrying the item label. */
    public static final String TAG_LABEL_NAME = "label";

    private InfluxDBConstants() {
        // Constants holder: prevent instantiation
    }
}

View File

@@ -0,0 +1,73 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.text.DateFormat;
import java.time.ZonedDateTime;
import java.util.Date;

import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
/**
 * Java bean used to return items queries results from InfluxDB.
 *
 * @author Theo Weiss - Initial Contribution
 * @author Joan Pujol Espinar - Addon rewrite refactoring code and adding support for InfluxDB 2.0.
 */
@NonNullByDefault
public class InfluxDBHistoricItem implements HistoricItem {
    private String name = "";
    private State state = UnDefType.NULL;
    private ZonedDateTime timestamp;

    public InfluxDBHistoricItem(String name, State state, ZonedDateTime timestamp) {
        this.name = name;
        this.state = state;
        this.timestamp = timestamp;
    }

    /** @return the name of the persisted item */
    @Override
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the persisted state */
    @Override
    public State getState() {
        return state;
    }

    public void setState(State state) {
        this.state = state;
    }

    /** @return the time the state was persisted */
    @Override
    public ZonedDateTime getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(ZonedDateTime timestamp) {
        this.timestamp = timestamp;
    }

    @Override
    public String toString() {
        // BUGFIX: DateFormat.format(Object) only accepts Date/Number and threw
        // IllegalArgumentException when handed a ZonedDateTime; convert through java.util.Date.
        return DateFormat.getDateTimeInstance().format(Date.from(timestamp.toInstant())) + ": " + name + " -> "
                + state.toString();
    }
}

View File

@@ -0,0 +1,58 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.util.Date;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.persistence.PersistenceItemInfo;
/**
 * Java bean used to return information about stored items
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class InfluxDBPersistentItemInfo implements PersistenceItemInfo {
    private final String itemName;
    private final Integer pointCount;

    public InfluxDBPersistentItemInfo(String name, Integer count) {
        this.itemName = name;
        this.pointCount = count;
    }

    /** @return the name of the stored item */
    @Override
    public String getName() {
        return itemName;
    }

    /** @return the number of stored points for this item */
    @Override
    public @Nullable Integer getCount() {
        return pointCount;
    }

    /** Earliest timestamp is not tracked by this service. */
    @Override
    public @Nullable Date getEarliest() {
        return null;
    }

    /** Latest timestamp is not tracked by this service. */
    @Override
    public @Nullable Date getLatest() {
        return null;
    }
}

View File

@@ -0,0 +1,74 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.util.List;
import java.util.Map;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
 * Manages InfluxDB server interaction, maintaining the client connection
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public interface InfluxDBRepository {
    /**
     * Returns whether the client is successfully connected to the server
     *
     * @return true if it's connected, otherwise false
     */
    boolean isConnected();

    /**
     * Connect to the InfluxDB server
     *
     * @return true if the connection succeeded, otherwise false
     */
    boolean connect();

    /**
     * Disconnect from the InfluxDB server
     */
    void disconnect();

    /**
     * Checks whether the connection is currently ready to be used
     *
     * @return true if it's ready, otherwise false
     */
    boolean checkConnectionStatus();

    /**
     * Returns all stored item names with their counts of stored points
     *
     * @return map with {@code <itemName, pointCount>} entries
     */
    Map<String, Integer> getStoredItemsCount();

    /**
     * Executes the given query (Flux for 2.X, InfluxQL for 1.X — presumably; the dialect
     * is implementation-specific, confirm against the concrete repository)
     *
     * @param query Query
     * @return Query results
     */
    List<InfluxRow> query(String query);

    /**
     * Writes a point to the database
     *
     * @param influxPoint Point to write
     */
    void write(InfluxPoint influxPoint);
}

View File

@@ -0,0 +1,186 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.TimeZone;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.items.GroupItem;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemNotFoundException;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.HSBType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.library.types.PointType;
import org.openhab.core.library.types.QuantityType;
import org.openhab.core.library.types.StringType;
import org.openhab.core.types.State;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Conversion logic between openHAB {@link State} types and InfluxDB store types
*
* @author Joan Pujol Espinar - Initial contribution, based on previous work from Theo Weiss and Dominik Vorreiter
*/
@NonNullByDefault
public class InfluxDBStateConvertUtils {

    static final Number DIGITAL_VALUE_OFF = 0; // Visible for testing
    static final Number DIGITAL_VALUE_ON = 1; // Visible for testing

    // Made final: the logger is shared static state and must not be reassignable
    private static final Logger LOGGER = LoggerFactory.getLogger(InfluxDBStateConvertUtils.class);

    /**
     * Converts {@link State} to objects fitting into influxdb values.
     *
     * @param state to be converted
     * @return BigInteger or Double for DecimalType and QuantityType, 0 or 1 for OnOffType and OpenClosedType,
     *         epoch milliseconds (long) for DateTimeType, String for all others
     */
    public static Object stateToObject(State state) {
        Object value;
        if (state instanceof HSBType) {
            value = state.toString();
        } else if (state instanceof PointType) {
            value = point2String((PointType) state);
        } else if (state instanceof DecimalType) {
            value = convertBigDecimalToNum(((DecimalType) state).toBigDecimal());
        } else if (state instanceof QuantityType<?>) {
            value = convertBigDecimalToNum(((QuantityType<?>) state).toBigDecimal());
        } else if (state instanceof OnOffType) {
            value = state == OnOffType.ON ? DIGITAL_VALUE_ON : DIGITAL_VALUE_OFF;
        } else if (state instanceof OpenClosedType) {
            value = state == OpenClosedType.OPEN ? DIGITAL_VALUE_ON : DIGITAL_VALUE_OFF;
        } else if (state instanceof DateTimeType) {
            value = ((DateTimeType) state).getZonedDateTime().toInstant().toEpochMilli();
        } else {
            value = state.toString();
        }
        return value;
    }

    /**
     * Converts a value to a {@link State} which is suitable for the given {@link Item}. This is
     * needed for querying a {@link InfluxDBHistoricItem}.
     *
     * @param value to be converted to a {@link State}
     * @param itemName name of the {@link Item} to get the {@link State} for
     * @param itemRegistry registry used to look up the item, may be null
     * @return the state of the item represented by the itemName parameter, else the string value of
     *         the Object parameter
     */
    public static State objectToState(Object value, String itemName, @Nullable ItemRegistry itemRegistry) {
        State state = null;
        if (itemRegistry != null) {
            try {
                Item item = itemRegistry.getItem(itemName);
                state = objectToState(value, item);
            } catch (ItemNotFoundException e) {
                LOGGER.info("Could not find item '{}' in registry", itemName);
            }
        }
        if (state == null) {
            // Fallback when no registry is available or the item is unknown
            state = new StringType(String.valueOf(value));
        }
        return state;
    }

    /**
     * Converts a stored value back to a {@link State} matching the type of the given item.
     *
     * @param value the stored value
     * @param itemToSetState the item whose type determines the conversion
     * @return the converted state; a {@link StringType} if the item type is not specifically handled
     */
    public static State objectToState(Object value, Item itemToSetState) {
        String valueStr = String.valueOf(value);
        Item item = itemToSetState;
        if (item instanceof GroupItem) {
            // Use the base item to determine the state type of the group
            item = ((GroupItem) item).getBaseItem();
        }
        if (item instanceof ColorItem) {
            return new HSBType(valueStr);
        } else if (item instanceof LocationItem) {
            return new PointType(valueStr);
        } else if (item instanceof NumberItem) {
            return new DecimalType(valueStr);
        } else if (item instanceof DimmerItem) {
            return new PercentType(valueStr);
        } else if (item instanceof SwitchItem) {
            return toBoolean(valueStr) ? OnOffType.ON : OnOffType.OFF;
        } else if (item instanceof ContactItem) {
            return toBoolean(valueStr) ? OpenClosedType.OPEN : OpenClosedType.CLOSED;
        } else if (item instanceof RollershutterItem) {
            return new PercentType(valueStr);
        } else if (item instanceof DateTimeItem) {
            // Stored as epoch milliseconds, see stateToObject
            Instant i = Instant.ofEpochMilli(new BigDecimal(valueStr).longValue());
            ZonedDateTime z = ZonedDateTime.ofInstant(i, TimeZone.getDefault().toZoneId());
            return new DateTimeType(z);
        } else {
            return new StringType(valueStr);
        }
    }

    /**
     * Interprets the given object as a boolean, accepting Boolean instances, the string "1" and
     * boolean string literals ("true"/"false"); null yields false.
     */
    private static boolean toBoolean(@Nullable Object object) {
        if (object instanceof Boolean) {
            return (Boolean) object;
        } else if (object != null) {
            if ("1".equals(object)) {
                return true;
            } else {
                return Boolean.valueOf(String.valueOf(object));
            }
        } else {
            return false;
        }
    }

    /**
     * Serializes a {@link PointType} as "latitude,longitude[,altitude]", omitting a zero altitude.
     */
    private static String point2String(PointType point) {
        StringBuilder buf = new StringBuilder();
        buf.append(point.getLatitude().toString());
        buf.append(",");
        buf.append(point.getLongitude().toString());
        if (!point.getAltitude().equals(DecimalType.ZERO)) {
            buf.append(",");
            buf.append(point.getAltitude().toString());
        }
        return buf.toString(); // latitude, longitude, altitude
    }

    /**
     * Converts the given BigDecimal to a BigInteger when it has no fractional digits, otherwise to a
     * double. This is an optimization for influxdb because integers have less overhead.
     *
     * @param value the BigDecimal to be converted
     * @return a BigInteger if the value's scale is 0, otherwise a Double
     */
    private static Object convertBigDecimalToNum(BigDecimal value) {
        Object convertedValue;
        if (value.scale() == 0) {
            convertedValue = value.toBigInteger();
        } else {
            convertedValue = value.doubleValue();
        }
        return convertedValue;
    }
}

View File

@@ -0,0 +1,24 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
/**
* InfluxDB version
*
* @author Joan Pujol Espinar - Initial contribution
*/
public enum InfluxDBVersion {
    /** InfluxDB 1.x, addressed via the influxdb-java client and its query builder */
    V1,
    /** InfluxDB 2.x, addressed via the influxdb-client-java client and the Flux DSL */
    V2,
    /** Version could not be determined */
    UNKNOWN
}

View File

@@ -0,0 +1,91 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.time.Instant;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.jdt.annotation.DefaultLocation;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
* Point data to be stored in InfluxDB
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault({ DefaultLocation.PARAMETER })
public class InfluxPoint {

    // Fields are final: an InfluxPoint is an immutable value object once built
    private final String measurementName;
    private final Instant time;
    private final Object value;
    private final Map<String, String> tags;

    private InfluxPoint(Builder builder) {
        measurementName = builder.measurementName;
        time = builder.time;
        value = builder.value;
        // Defensive copy: the original shared the builder's map, so reusing the builder
        // after build() would have mutated already-built points
        tags = new HashMap<>(builder.tags);
    }

    /**
     * Creates a builder for a point with the given measurement name.
     *
     * @param measurementName target measurement of the point
     */
    public static Builder newBuilder(String measurementName) {
        return new Builder(measurementName);
    }

    public String getMeasurementName() {
        return measurementName;
    }

    public Instant getTime() {
        return time;
    }

    public Object getValue() {
        return value;
    }

    /** Returns an unmodifiable view of the point's tags. */
    public Map<String, String> getTags() {
        return Collections.unmodifiableMap(tags);
    }

    // Added so that points are readable when logged (repositories log points on write failures)
    @Override
    public String toString() {
        return "InfluxPoint [measurementName=" + measurementName + ", time=" + time + ", value=" + value + ", tags="
                + tags + "]";
    }

    /** Fluent builder for {@link InfluxPoint}. */
    public static final class Builder {
        private String measurementName;
        private Instant time;
        private Object value;
        private Map<String, String> tags = new HashMap<>();

        private Builder(String measurementName) {
            this.measurementName = measurementName;
        }

        public Builder withTime(Instant val) {
            time = val;
            return this;
        }

        public Builder withValue(Object val) {
            value = val;
            return this;
        }

        public Builder withTag(String name, String value) {
            tags.put(name, value);
            return this;
        }

        public InfluxPoint build() {
            return new InfluxPoint(this);
        }
    }
}

View File

@@ -0,0 +1,47 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import java.time.Instant;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
* Row data returned from database query
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class InfluxRow {

    /** Timestamp of the stored value. */
    private final Instant time;
    /** Name of the item the value belongs to. */
    private final String itemName;
    /** Stored value as returned by the database driver. */
    private final Object value;

    public InfluxRow(Instant time, String itemName, Object value) {
        this.time = time;
        this.itemName = itemName;
        this.value = value;
    }

    public Instant getTime() {
        return this.time;
    }

    public String getItemName() {
        return this.itemName;
    }

    public Object getValue() {
        return this.value;
    }
}

View File

@@ -0,0 +1,127 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.*;
import java.time.Instant;
import java.util.Optional;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.items.Item;
import org.openhab.core.items.Metadata;
import org.openhab.core.items.MetadataKey;
import org.openhab.core.items.MetadataRegistry;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
import org.openhab.persistence.influxdb.InfluxDBPersistenceService;
/**
* Logic to create an InfluxDB {@link InfluxPoint} from an openHAB {@link Item}
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class ItemToStorePointCreator {
    private final InfluxDBConfiguration configuration;
    private final @Nullable MetadataRegistry metadataRegistry;

    public ItemToStorePointCreator(InfluxDBConfiguration configuration, @Nullable MetadataRegistry metadataRegistry) {
        this.configuration = configuration;
        this.metadataRegistry = metadataRegistry;
    }

    /**
     * Converts the current state of the given item into a point ready to be stored.
     *
     * @param item the item to convert
     * @param storeAlias optional alias overriding the item name as measurement name
     * @return the point to store, or null if the item state is an {@link UnDefType}
     */
    public @Nullable InfluxPoint convert(Item item, @Nullable String storeAlias) {
        if (item.getState() instanceof UnDefType) {
            return null;
        }
        String measurementName = calculateMeasurementName(item, storeAlias);
        String itemName = item.getName();
        State state = getItemState(item);
        Object value = InfluxDBStateConvertUtils.stateToObject(state);
        InfluxPoint.Builder point = InfluxPoint.newBuilder(measurementName).withTime(Instant.now()).withValue(value)
                .withTag(TAG_ITEM_NAME, itemName);
        addPointTags(item, point);
        return point.build();
    }

    /** Returns the measurement name, honoring the alias and the replace-underscore option. */
    private String calculateMeasurementName(Item item, @Nullable String storeAlias) {
        String name = storeAlias != null && !storeAlias.isBlank() ? storeAlias : item.getName();
        if (configuration.isReplaceUnderscore()) {
            name = name.replace('_', '.');
        }
        return name;
    }

    /** Returns the item state, converted to the desired store type when such a conversion succeeds. */
    private State getItemState(Item item) {
        final State state;
        final Optional<Class<? extends State>> desiredConversion = calculateDesiredTypeConversionToStore(item);
        if (desiredConversion.isPresent()) {
            State convertedState = item.getStateAs(desiredConversion.get());
            if (convertedState != null) {
                state = convertedState;
            } else {
                state = item.getState();
            }
        } else {
            state = item.getState();
        }
        return state;
    }

    /** Returns the first accepted command type that is also a {@link State} subtype, if any. */
    private Optional<Class<? extends State>> calculateDesiredTypeConversionToStore(Item item) {
        // Fixed: the original used commandType.isAssignableFrom(State.class), which tests the opposite
        // direction (commandType being a supertype of State) and therefore almost never matched; the
        // corrected test matches the precondition of asSubclass(State.class) below.
        return item.getAcceptedCommandTypes().stream().filter(commandType -> State.class.isAssignableFrom(commandType))
                .findFirst().map(commandType -> commandType.asSubclass(State.class));
    }

    /** Adds the configured category/type/label tags plus any tags from item metadata. */
    private void addPointTags(Item item, InfluxPoint.Builder point) {
        if (configuration.isAddCategoryTag()) {
            String categoryName = item.getCategory();
            if (categoryName == null) {
                categoryName = "n/a";
            }
            point.withTag(TAG_CATEGORY_NAME, categoryName);
        }
        if (configuration.isAddTypeTag()) {
            point.withTag(TAG_TYPE_NAME, item.getType());
        }
        if (configuration.isAddLabelTag()) {
            String labelName = item.getLabel();
            if (labelName == null) {
                labelName = "n/a";
            }
            point.withTag(TAG_LABEL_NAME, labelName);
        }
        final MetadataRegistry currentMetadataRegistry = metadataRegistry;
        if (currentMetadataRegistry != null) {
            // Any metadata configured under this service's namespace becomes additional tags
            MetadataKey key = new MetadataKey(InfluxDBPersistenceService.SERVICE_NAME, item.getName());
            Metadata metadata = currentMetadataRegistry.get(key);
            if (metadata != null) {
                metadata.getConfiguration().forEach((tagName, tagValue) -> {
                    point.withTag(tagName, tagValue.toString());
                });
            }
        }
    }
}

View File

@@ -0,0 +1,51 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.persistence.influxdb.internal.influx1.Influx1FilterCriteriaQueryCreatorImpl;
import org.openhab.persistence.influxdb.internal.influx1.InfluxDB1RepositoryImpl;
import org.openhab.persistence.influxdb.internal.influx2.Influx2FilterCriteriaQueryCreatorImpl;
import org.openhab.persistence.influxdb.internal.influx2.InfluxDB2RepositoryImpl;
/**
* Factory that returns {@link InfluxDBRepository} and {@link FilterCriteriaQueryCreator} implementations
* depending on InfluxDB version
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class RepositoryFactory {

    /**
     * Creates the {@link InfluxDBRepository} implementation matching the configured InfluxDB version.
     *
     * @throws UnnexpectedConditionException if the configured version is not supported
     */
    public static InfluxDBRepository createRepository(InfluxDBConfiguration influxDBConfiguration) {
        final InfluxDBVersion version = influxDBConfiguration.getVersion();
        if (version == InfluxDBVersion.V1) {
            return new InfluxDB1RepositoryImpl(influxDBConfiguration);
        } else if (version == InfluxDBVersion.V2) {
            return new InfluxDB2RepositoryImpl(influxDBConfiguration);
        } else {
            throw new UnnexpectedConditionException("Not expected version " + version);
        }
    }

    /**
     * Creates the {@link FilterCriteriaQueryCreator} implementation matching the configured InfluxDB version.
     *
     * @throws UnnexpectedConditionException if the configured version is not supported
     */
    public static FilterCriteriaQueryCreator createQueryCreator(InfluxDBConfiguration influxDBConfiguration) {
        final InfluxDBVersion version = influxDBConfiguration.getVersion();
        if (version == InfluxDBVersion.V1) {
            return new Influx1FilterCriteriaQueryCreatorImpl();
        } else if (version == InfluxDBVersion.V2) {
            return new Influx2FilterCriteriaQueryCreatorImpl();
        } else {
            throw new UnnexpectedConditionException("Not expected version " + version);
        }
    }
}

View File

@@ -0,0 +1,33 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
* Throw to indicate an unnexpected condition that should not have happened (a bug)
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class UnnexpectedConditionException extends RuntimeException {
    private static final long serialVersionUID = 1128380327167959556L;

    /**
     * Creates a new exception with the given detail message.
     *
     * @param message description of the unexpected condition
     */
    public UnnexpectedConditionException(String message) {
        super(message);
    }

    /**
     * Creates a new exception with the given detail message and cause.
     *
     * @param message description of the unexpected condition
     * @param cause the underlying cause
     */
    public UnnexpectedConditionException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@@ -0,0 +1,96 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal.influx1;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.*;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.*;
import static org.openhab.persistence.influxdb.internal.InfluxDBStateConvertUtils.stateToObject;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.influxdb.dto.Query;
import org.influxdb.querybuilder.Appender;
import org.influxdb.querybuilder.BuiltQuery;
import org.influxdb.querybuilder.Select;
import org.influxdb.querybuilder.Where;
import org.influxdb.querybuilder.clauses.SimpleClause;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.persistence.influxdb.internal.FilterCriteriaQueryCreator;
import org.openhab.persistence.influxdb.internal.InfluxDBVersion;
/**
* Implementation of {@link FilterCriteriaQueryCreator} for InfluxDB 1.0
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class Influx1FilterCriteriaQueryCreatorImpl implements FilterCriteriaQueryCreator {
    /**
     * Builds an InfluxQL SELECT statement from the given filter criteria.
     *
     * @param criteria the persistence filter criteria (item name, date range, state comparison, ordering, paging)
     * @param retentionPolicy the retention policy to query from
     * @return the InfluxQL query string
     */
    @Override
    public String createQuery(FilterCriteria criteria, String retentionPolicy) {
        final String tableName;
        boolean hasCriteriaName = criteria.getItemName() != null;
        if (hasCriteriaName) {
            tableName = criteria.getItemName();
        } else {
            // No item name given: match all measurements with a regex (must not be escaped below)
            tableName = "/.*/";
        }
        Select select = select(COLUMN_VALUE_NAME_V1).fromRaw(null,
                fullQualifiedTableName(retentionPolicy, tableName, hasCriteriaName));
        Where where = select.where();
        if (criteria.getBeginDate() != null) {
            where = where.and(
                    BuiltQuery.QueryBuilder.gte(COLUMN_TIME_NAME_V1, criteria.getBeginDate().toInstant().toString()));
        }
        if (criteria.getEndDate() != null) {
            where = where.and(
                    BuiltQuery.QueryBuilder.lte(COLUMN_TIME_NAME_V1, criteria.getEndDate().toInstant().toString()));
        }
        if (criteria.getState() != null && criteria.getOperator() != null) {
            // State filter: compare the value column using the criteria's operator symbol
            where = where.and(new SimpleClause(COLUMN_VALUE_NAME_V1,
                    getOperationSymbol(criteria.getOperator(), InfluxDBVersion.V1),
                    stateToObject(criteria.getState())));
        }
        if (criteria.getOrdering() == FilterCriteria.Ordering.DESCENDING) {
            select = select.orderBy(desc());
        } else if (criteria.getOrdering() == FilterCriteria.Ordering.ASCENDING) {
            select = select.orderBy(asc());
        }
        if (criteria.getPageSize() != Integer.MAX_VALUE) {
            // Paging: LIMIT with an OFFSET when a page beyond the first is requested
            if (criteria.getPageNumber() != 0) {
                select = select.limit(criteria.getPageSize(), criteria.getPageSize() * criteria.getPageNumber());
            } else {
                select = select.limit(criteria.getPageSize());
            }
        }
        final Query query = (Query) select;
        return query.getCommand();
    }

    /**
     * Joins retention policy and measurement name into "rp"."name"; the measurement part is left
     * unescaped when it is the match-all regex.
     */
    private String fullQualifiedTableName(String retentionPolicy, String tableName, boolean escapeTableName) {
        StringBuilder sb = new StringBuilder();
        Appender.appendName(retentionPolicy, sb);
        sb.append(".");
        if (escapeTableName) {
            Appender.appendName(tableName, sb);
        } else {
            sb.append(tableName);
        }
        return sb.toString();
    }
}

View File

@@ -0,0 +1,211 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal.influx1;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.COLUMN_TIME_NAME_V1;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.COLUMN_VALUE_NAME_V1;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.FIELD_VALUE_NAME;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;
import org.influxdb.dto.Pong;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.openhab.persistence.influxdb.internal.InfluxDBConfiguration;
import org.openhab.persistence.influxdb.internal.InfluxDBRepository;
import org.openhab.persistence.influxdb.internal.InfluxPoint;
import org.openhab.persistence.influxdb.internal.InfluxRow;
import org.openhab.persistence.influxdb.internal.UnnexpectedConditionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of {@link InfluxDBRepository} for InfluxDB 1.0
*
* @author Joan Pujol Espinar - Initial contribution. Most code has been moved from
* {@link org.openhab.persistence.influxdb.InfluxDBPersistenceService} where it was in previous version
*/
@NonNullByDefault
public class InfluxDB1RepositoryImpl implements InfluxDBRepository {
    private final Logger logger = LoggerFactory.getLogger(InfluxDB1RepositoryImpl.class);

    // Made final: the configuration is set once in the constructor and never reassigned
    private final InfluxDBConfiguration configuration;

    @Nullable
    private InfluxDB client;

    public InfluxDB1RepositoryImpl(InfluxDBConfiguration configuration) {
        this.configuration = configuration;
    }

    @Override
    public boolean isConnected() {
        return client != null;
    }

    @Override
    public boolean connect() {
        final InfluxDB createdClient = InfluxDBFactory.connect(configuration.getUrl(), configuration.getUser(),
                configuration.getPassword());
        createdClient.setDatabase(configuration.getDatabaseName());
        createdClient.setRetentionPolicy(configuration.getRetentionPolicy());
        // Batch writes to reduce request overhead: flush every 200 points or every 100 ms
        createdClient.enableBatch(200, 100, TimeUnit.MILLISECONDS);
        this.client = createdClient;
        return checkConnectionStatus();
    }

    @Override
    public void disconnect() {
        this.client = null;
    }

    @Override
    public boolean checkConnectionStatus() {
        boolean dbStatus = false;
        final InfluxDB currentClient = client;
        if (currentClient != null) {
            try {
                Pong pong = currentClient.ping();
                String version = pong.getVersion();
                // may be check for version >= 0.9
                if (version != null && !version.contains("unknown")) {
                    dbStatus = true;
                    logger.debug("database status is OK, version is {}", version);
                } else {
                    logger.warn("database ping error, version is: \"{}\" response time was \"{}\"", version,
                            pong.getResponseTime());
                    dbStatus = false;
                }
            } catch (RuntimeException e) {
                dbStatus = false;
                logger.error("database connection failed", e);
                handleDatabaseException(e);
            }
        } else {
            logger.warn("checkConnection: database is not connected");
        }
        return dbStatus;
    }

    private void handleDatabaseException(Exception e) {
        logger.warn("database error: {}", e.getMessage(), e);
    }

    @Override
    public void write(InfluxPoint point) {
        final InfluxPoint currentPoint = point;
        final InfluxDB currentClient = this.client;
        if (currentClient != null) {
            Point clientPoint = convertPointToClientFormat(currentPoint);
            currentClient.write(configuration.getDatabaseName(), configuration.getRetentionPolicy(), clientPoint);
        } else {
            logger.warn("Write point {} ignored due to client isn't connected", point);
        }
    }

    /** Translates an {@link InfluxPoint} into the influxdb-java client's {@link Point} format. */
    private Point convertPointToClientFormat(InfluxPoint point) {
        Point.Builder clientPoint = Point.measurement(point.getMeasurementName()).time(point.getTime().toEpochMilli(),
                TimeUnit.MILLISECONDS);
        setPointValue(point.getValue(), clientPoint);
        point.getTags().forEach((name, tagValue) -> clientPoint.tag(name, tagValue));
        return clientPoint.build();
    }

    /** Adds the point's value as the value field, dispatching on its runtime type. */
    private void setPointValue(@Nullable Object value, Point.Builder point) {
        if (value instanceof String) {
            point.addField(FIELD_VALUE_NAME, (String) value);
        } else if (value instanceof Number) {
            point.addField(FIELD_VALUE_NAME, (Number) value);
        } else if (value instanceof Boolean) {
            point.addField(FIELD_VALUE_NAME, (Boolean) value);
        } else if (value == null) {
            point.addField(FIELD_VALUE_NAME, (String) null);
        } else {
            throw new UnnexpectedConditionException("Not expected value type");
        }
    }

    @Override
    public List<InfluxRow> query(String query) {
        final InfluxDB currentClient = client;
        if (currentClient != null) {
            Query parsedQuery = new Query(query, configuration.getDatabaseName());
            List<QueryResult.Result> results = currentClient.query(parsedQuery, TimeUnit.MILLISECONDS).getResults();
            return convertClientResultToRepository(results);
        } else {
            logger.warn("Returning empty list because queryAPI isn't present");
            return Collections.emptyList();
        }
    }

    /**
     * Flattens the client's query results into {@link InfluxRow}s, skipping errored results and
     * empty series. Fixed the method name typo ("Resut").
     *
     * @throws IllegalStateException if a series lacks the time or value column
     */
    private List<InfluxRow> convertClientResultToRepository(List<QueryResult.Result> results) {
        List<InfluxRow> rows = new ArrayList<>();
        for (QueryResult.Result result : results) {
            if (result.getError() != null) {
                logger.warn("{}", result.getError());
                continue;
            }
            List<QueryResult.Series> allSeries = result.getSeries();
            if (allSeries == null) {
                logger.debug("query returned no series");
                continue;
            }
            for (QueryResult.Series series : allSeries) {
                logger.trace("series {}", series.toString());
                String itemName = series.getName();
                List<List<Object>> allValues = series.getValues();
                if (allValues == null) {
                    logger.debug("query returned no values");
                    continue;
                }
                List<String> columns = series.getColumns();
                logger.trace("columns {}", columns);
                if (columns == null) {
                    continue;
                }
                // Locate the time and value columns by name
                Integer timestampColumn = null;
                Integer valueColumn = null;
                for (int i = 0; i < columns.size(); i++) {
                    String columnName = columns.get(i);
                    if (COLUMN_TIME_NAME_V1.equals(columnName)) {
                        timestampColumn = i;
                    } else if (COLUMN_VALUE_NAME_V1.equals(columnName)) {
                        valueColumn = i;
                    }
                }
                if (valueColumn == null || timestampColumn == null) {
                    throw new IllegalStateException("missing column");
                }
                for (List<Object> row : allValues) {
                    // The client returns timestamps as Double epoch milliseconds
                    Double rawTime = (Double) row.get(timestampColumn);
                    Instant time = Instant.ofEpochMilli(rawTime.longValue());
                    Object value = row.get(valueColumn);
                    logger.trace("adding historic item {}: time {} value {}", itemName, time, value);
                    rows.add(new InfluxRow(time, itemName, value));
                }
            }
        }
        return rows;
    }

    @Override
    public Map<String, Integer> getStoredItemsCount() {
        // Item counts are not implemented for InfluxDB 1.x
        return Collections.emptyMap();
    }
}

View File

@@ -0,0 +1,77 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal.influx2;
import static com.influxdb.query.dsl.functions.restriction.Restrictions.measurement;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.*;
import static org.openhab.persistence.influxdb.internal.InfluxDBStateConvertUtils.stateToObject;
import java.time.temporal.ChronoUnit;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.persistence.influxdb.internal.FilterCriteriaQueryCreator;
import org.openhab.persistence.influxdb.internal.InfluxDBVersion;
import com.influxdb.query.dsl.Flux;
import com.influxdb.query.dsl.functions.RangeFlux;
import com.influxdb.query.dsl.functions.restriction.Restrictions;
/**
* Implementation of {@link FilterCriteriaQueryCreator} for InfluxDB 2.0
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class Influx2FilterCriteriaQueryCreatorImpl implements FilterCriteriaQueryCreator {
    /**
     * Builds a Flux query from the given filter criteria.
     *
     * @param criteria the persistence filter criteria (item name, date range, state comparison, ordering, paging)
     * @param retentionPolicy used as the bucket name to query from
     * @return the Flux query string
     */
    @Override
    public String createQuery(FilterCriteria criteria, String retentionPolicy) {
        Flux flux = Flux.from(retentionPolicy);
        if (criteria.getBeginDate() != null || criteria.getEndDate() != null) {
            RangeFlux range = flux.range();
            if (criteria.getBeginDate() != null) {
                range = range.withStart(criteria.getBeginDate().toInstant());
            }
            if (criteria.getEndDate() != null) {
                range = range.withStop(criteria.getEndDate().toInstant());
            }
            flux = range;
        } else {
            flux = flux.range(-100L, ChronoUnit.YEARS); // Flux needs a mandatory range
        }
        if (criteria.getItemName() != null) {
            // Item name maps to the measurement name
            flux = flux.filter(measurement().equal(criteria.getItemName()));
        }
        if (criteria.getState() != null && criteria.getOperator() != null) {
            // Restrict to the value field and compare with the criteria's operator symbol
            Restrictions restrictions = Restrictions.and(Restrictions.field().equal(FIELD_VALUE_NAME),
                    Restrictions.value().custom(stateToObject(criteria.getState()),
                            getOperationSymbol(criteria.getOperator(), InfluxDBVersion.V2)));
            flux = flux.filter(restrictions);
        }
        if (criteria.getOrdering() != null) {
            boolean desc = criteria.getOrdering() == FilterCriteria.Ordering.DESCENDING;
            flux = flux.sort().withDesc(desc).withColumns(new String[] { COLUMN_TIME_NAME_V2 });
        }
        if (criteria.getPageSize() != Integer.MAX_VALUE) {
            // Paging: limit with an offset computed from the requested page number
            flux = flux.limit(criteria.getPageSize()).withPropertyValue("offset",
                    criteria.getPageNumber() * criteria.getPageSize());
        }
        return flux.toString();
    }
}

View File

@@ -0,0 +1,230 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal.influx2;
import static org.openhab.persistence.influxdb.internal.InfluxDBConstants.*;
import java.time.Instant;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.persistence.influxdb.internal.InfluxDBConfiguration;
import org.openhab.persistence.influxdb.internal.InfluxDBConstants;
import org.openhab.persistence.influxdb.internal.InfluxDBRepository;
import org.openhab.persistence.influxdb.internal.InfluxPoint;
import org.openhab.persistence.influxdb.internal.InfluxRow;
import org.openhab.persistence.influxdb.internal.UnnexpectedConditionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.InfluxDBClientOptions;
import com.influxdb.client.QueryApi;
import com.influxdb.client.WriteApi;
import com.influxdb.client.domain.Ready;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.influxdb.query.FluxTable;
/**
* Implementation of {@link InfluxDBRepository} for InfluxDB 2.0
*
* @author Joan Pujol Espinar - Initial contribution
*/
@NonNullByDefault
public class InfluxDB2RepositoryImpl implements InfluxDBRepository {
private final Logger logger = LoggerFactory.getLogger(InfluxDB2RepositoryImpl.class);
private InfluxDBConfiguration configuration;
@Nullable
private InfluxDBClient client;
@Nullable
private QueryApi queryAPI;
@Nullable
private WriteApi writeAPI;
/**
 * Creates a repository for the given connection settings. No connection is attempted until
 * {@link #connect()} is called.
 *
 * @param configuration InfluxDB connection configuration
 */
public InfluxDB2RepositoryImpl(InfluxDBConfiguration configuration) {
    this.configuration = configuration;
}
/**
 * Returns if the client has been successfully connected to server
 *
 * @return True if it's connected, otherwise false
 */
@Override
public boolean isConnected() {
    return client != null;
}
/**
* Connect to InfluxDB server
*
* @return True if successful, otherwise false
*/
@Override
public boolean connect() {
InfluxDBClientOptions.Builder optionsBuilder = InfluxDBClientOptions.builder().url(configuration.getUrl())
.org(configuration.getDatabaseName()).bucket(configuration.getRetentionPolicy());
char[] token = configuration.getTokenAsCharArray();
if (token.length > 0) {
optionsBuilder.authenticateToken(token);
} else {
optionsBuilder.authenticate(configuration.getUser(), configuration.getPassword().toCharArray());
}
InfluxDBClientOptions clientOptions = optionsBuilder.build();
final InfluxDBClient createdClient = InfluxDBClientFactory.create(clientOptions);
this.client = createdClient;
logger.debug("Succesfully connected to InfluxDB. Instance ready={}", createdClient.ready());
queryAPI = createdClient.getQueryApi();
writeAPI = createdClient.getWriteApi();
return checkConnectionStatus();
}
/**
* Disconnect from InfluxDB server
*/
@Override
public void disconnect() {
final InfluxDBClient currentClient = this.client;
if (currentClient != null) {
currentClient.close();
}
this.client = null;
}
/**
* Check if connection is currently ready
*
* @return True if its ready, otherwise false
*/
@Override
public boolean checkConnectionStatus() {
final InfluxDBClient currentClient = client;
if (currentClient != null) {
Ready ready = currentClient.ready();
boolean isUp = ready != null && ready.getStatus() == Ready.StatusEnum.READY;
if (isUp) {
logger.debug("database status is OK");
} else {
logger.warn("database not ready");
}
return isUp;
} else {
logger.warn("checkConnection: database is not connected");
return false;
}
}
/**
* Write point to database
*
* @param point
*/
@Override
public void write(InfluxPoint point) {
final WriteApi currentWriteAPI = writeAPI;
if (currentWriteAPI != null) {
currentWriteAPI.writePoint(convertPointToClientFormat(point));
} else {
logger.warn("Write point {} ignored due to writeAPI isn't present", point);
}
}
private Point convertPointToClientFormat(InfluxPoint point) {
Point clientPoint = Point.measurement(point.getMeasurementName()).time(point.getTime(), WritePrecision.MS);
setPointValue(point.getValue(), clientPoint);
point.getTags().entrySet().forEach(e -> clientPoint.addTag(e.getKey(), e.getValue()));
return clientPoint;
}
private void setPointValue(@Nullable Object value, Point point) {
if (value instanceof String) {
point.addField(FIELD_VALUE_NAME, (String) value);
} else if (value instanceof Number) {
point.addField(FIELD_VALUE_NAME, (Number) value);
} else if (value instanceof Boolean) {
point.addField(FIELD_VALUE_NAME, (Boolean) value);
} else if (value == null) {
point.addField(FIELD_VALUE_NAME, (String) null);
} else {
throw new UnnexpectedConditionException("Not expected value type");
}
}
/**
* Executes Flux query
*
* @param query Query
* @return Query results
*/
@Override
public List<InfluxRow> query(String query) {
final QueryApi currentQueryAPI = queryAPI;
if (currentQueryAPI != null) {
List<FluxTable> clientResult = currentQueryAPI.query(query);
return convertClientResutToRepository(clientResult);
} else {
logger.warn("Returning empty list because queryAPI isn't present");
return Collections.emptyList();
}
}
private List<InfluxRow> convertClientResutToRepository(List<FluxTable> clientResult) {
return clientResult.stream().flatMap(this::mapRawResultToHistoric).collect(Collectors.toList());
}
private Stream<InfluxRow> mapRawResultToHistoric(FluxTable rawRow) {
return rawRow.getRecords().stream().map(r -> {
String itemName = (String) r.getValueByKey(InfluxDBConstants.TAG_ITEM_NAME);
Object value = r.getValueByKey(COLUMN_VALUE_NAME_V2);
Instant time = (Instant) r.getValueByKey(COLUMN_TIME_NAME_V2);
return new InfluxRow(time, itemName, value);
});
}
/**
* Return all stored item names with it's count of stored points
*
* @return Map with <ItemName,ItemCount> entries
*/
@Override
public Map<String, Integer> getStoredItemsCount() {
final QueryApi currentQueryAPI = queryAPI;
if (currentQueryAPI != null) {
Map<String, Integer> result = new LinkedHashMap<>();
// Query wrote by hand https://github.com/influxdata/influxdb-client-java/issues/75
String query = "from(bucket: \"" + configuration.getRetentionPolicy() + "\")\n"
+ " |> range(start:-365d)\n" + " |> filter(fn: (r) => exists r." + TAG_ITEM_NAME + " )\n"
+ " |> group(columns: [\"" + TAG_ITEM_NAME + "\"], mode:\"by\")\n" + " |> count()\n"
+ " |> group()";
List<FluxTable> queryResult = currentQueryAPI.query(query);
queryResult.stream().findFirst().orElse(new FluxTable()).getRecords().forEach(row -> {
result.put((String) row.getValueByKey(TAG_ITEM_NAME), ((Number) row.getValue()).intValue());
});
return result;
} else {
logger.warn("Returning empty result because queryAPI isn't present");
return Collections.emptyMap();
}
}
}

View File

@@ -0,0 +1,106 @@
<?xml version="1.0" encoding="UTF-8"?>
<config-description:config-descriptions
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:config-description="https://openhab.org/schemas/config-description/v1.0.0"
xsi:schemaLocation="https://openhab.org/schemas/config-description/v1.0.0
https://openhab.org/schemas/config-description-1.0.0.xsd">
<config-description uri="persistence:influxdb">
<parameter-group name="connection">
<label>Connection</label>
<description>This group defines connection parameters.</description>
<advanced>false</advanced>
</parameter-group>
<parameter-group name="tags">
<label>Additional Tags</label>
<description>This group defines additional tags which can be added.</description>
<advanced>false</advanced>
</parameter-group>
<parameter-group name="misc">
<label>Miscellaneous</label>
<description>This group defines miscellaneous parameters.</description>
<advanced>false</advanced>
</parameter-group>
<parameter name="url" type="text" required="true" groupName="connection">
<context>url</context>
<label>Database URL</label>
<description>The database URL, e.g. http://127.0.0.1:8086 or http://127.0.0.1:9999</description>
<default>http://127.0.0.1:8086</default>
</parameter>
<parameter name="version" type="text" required="true" groupName="connection">
<label>Database Version</label>
<description>InfluxDB version</description>
<default>V1</default>
<options>
<option value="V1">InfluxDB 1</option>
<option value="V2">InfluxDB 2</option>
</options>
</parameter>
<parameter name="user" type="text" required="true" groupName="connection">
<label>Username</label>
<description>Database username</description>
<default>openhab</default>
</parameter>
<parameter name="password" type="text" required="false" groupName="connection">
<context>password</context>
<label>Database Password</label>
<description>Database password</description>
</parameter>
<parameter name="token" type="text" required="false" groupName="connection">
<label>Authentication Token</label>
<description>The token used to authenticate with the database (alternative to username/password, only for InfluxDB 2.0)
</description>
</parameter>
<parameter name="db" type="text" required="true" groupName="connection">
<label>Database/Organization</label>
<description>The name of the database (InfluxDB 1.0) or of the organization (InfluxDB 2.0)</description>
<default>openhab</default>
</parameter>
<parameter name="retentionPolicy" type="text" required="true" groupName="connection">
<label>Retention Policy / Bucket</label>
<description>The name of the retention policy (InfluxDB 1.0) or bucket (InfluxDB 2.0) to write data to
</description>
<default>openhab</default>
</parameter>
<parameter name="replaceUnderscore" type="boolean" required="true" groupName="misc">
<label>Replace Underscore</label>
<description>Whether underscores "_" in item names should be replaced by a dot "." ("test_item" ->
"test.item"). Only
for measurement name, not for tags. Also applies to alias names.
</description>
<default>false</default>
</parameter>
<parameter name="addCategoryTag" type="boolean" required="true" groupName="tags">
<label>Add Category Tag</label>
<description>Should the category of the item be included as tag "category"? If no category is set, "n/a" is
used.
</description>
<default>false</default>
</parameter>
<parameter name="addTypeTag" type="boolean" required="true" groupName="tags">
<label>Add Type Tag</label>
<description>Should the item type be included as tag "type"?</description>
<default>false</default>
</parameter>
<parameter name="addLabelTag" type="boolean" required="true" groupName="tags">
<label>Add Label Tag</label>
<description>Should the item label be included as tag "label"? If no label is set, "n/a" is used.
</description>
<default>false</default>
</parameter>
</config-description>
</config-description:config-descriptions>

View File

@@ -0,0 +1,52 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.openhab.persistence.influxdb.internal.InfluxDBConfiguration.DATABASE_PARAM;
import static org.openhab.persistence.influxdb.internal.InfluxDBConfiguration.RETENTION_POLICY_PARAM;
import static org.openhab.persistence.influxdb.internal.InfluxDBConfiguration.TOKEN_PARAM;
import static org.openhab.persistence.influxdb.internal.InfluxDBConfiguration.URL_PARAM;
import static org.openhab.persistence.influxdb.internal.InfluxDBConfiguration.VERSION_PARAM;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
/**
 * Builds configuration fixtures shared by the InfluxDB persistence unit tests.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class ConfigurationTestHelper {

    /** @return a parameter map describing a working InfluxDB 2.0 connection */
    public static Map<String, @Nullable Object> createValidConfigurationParameters() {
        Map<String, @Nullable Object> parameters = new HashMap<>();
        parameters.put(VERSION_PARAM, InfluxDBVersion.V2.name());
        parameters.put(URL_PARAM, "http://localhost:9999");
        parameters.put(TOKEN_PARAM, "sampletoken");
        parameters.put(DATABASE_PARAM, "openhab");
        parameters.put(RETENTION_POLICY_PARAM, "default");
        return parameters;
    }

    /** @return an {@link InfluxDBConfiguration} built from the valid parameter map */
    public static InfluxDBConfiguration createValidConfiguration() {
        return new InfluxDBConfiguration(createValidConfigurationParameters());
    }

    /** @return parameters made invalid by removing the mandatory token entry */
    public static Map<String, @Nullable Object> createInvalidConfigurationParameters() {
        Map<String, @Nullable Object> parameters = createValidConfigurationParameters();
        parameters.remove(TOKEN_PARAM);
        return parameters;
    }
}

View File

@@ -0,0 +1,107 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import java.util.Map;
import org.eclipse.jdt.annotation.DefaultLocation;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.items.MetadataRegistry;
import org.openhab.persistence.influxdb.InfluxDBPersistenceService;
/**
 * Tests the lifecycle and store behaviour of {@link InfluxDBPersistenceService}
 * against a mocked {@link InfluxDBRepository}, so no real database is required.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@ExtendWith(MockitoExtension.class)
@NonNullByDefault(value = { DefaultLocation.PARAMETER, DefaultLocation.RETURN_TYPE })
public class InfluxDBPersistenceServiceTest {
    // Service under test, created fresh for every test in before()
    private InfluxDBPersistenceService instance;

    private @Mock InfluxDBRepository influxDBRepository;

    private Map<String, @Nullable Object> validConfig;
    private Map<String, @Nullable Object> invalidConfig;

    @BeforeEach
    public void before() {
        // Override the factory method so the service uses the mocked repository
        // instead of creating a real InfluxDB client
        instance = new InfluxDBPersistenceService(mock(ItemRegistry.class), mock(MetadataRegistry.class)) {
            @Override
            protected InfluxDBRepository createInfluxDBRepository() {
                return influxDBRepository;
            }
        };
        validConfig = ConfigurationTestHelper.createValidConfigurationParameters();
        invalidConfig = ConfigurationTestHelper.createInvalidConfigurationParameters();
    }

    @AfterEach
    public void after() {
        // Drop references so every test starts from a clean fixture
        validConfig = null;
        invalidConfig = null;
        instance = null;
        influxDBRepository = null;
    }

    @Test
    public void activateWithValidConfigShouldConnectRepository() {
        instance.activate(validConfig);
        verify(influxDBRepository).connect();
    }

    @Test
    public void activateWithInvalidConfigShouldNotConnectRepository() {
        instance.activate(invalidConfig);
        verify(influxDBRepository, never()).connect();
    }

    @Test
    public void activateWithNullConfigShouldNotConnectRepository() {
        instance.activate(null);
        verify(influxDBRepository, never()).connect();
    }

    @Test
    public void deactivateShouldDisconnectRepository() {
        instance.activate(validConfig);
        instance.deactivate();
        verify(influxDBRepository).disconnect();
    }

    @Test
    public void storeItemWithConnectedRepository() {
        instance.activate(validConfig);
        // Only a connected repository receives write() calls
        when(influxDBRepository.isConnected()).thenReturn(true);
        instance.store(ItemTestHelper.createNumberItem("number", 5));
        verify(influxDBRepository).write(any());
    }

    @Test
    public void storeItemWithDisconnectedRepositoryIsIgnored() {
        instance.activate(validConfig);
        when(influxDBRepository.isConnected()).thenReturn(false);
        instance.store(ItemTestHelper.createNumberItem("number", 5));
        verify(influxDBRepository, never()).write(any());
    }
}

View File

@@ -0,0 +1,88 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.Test;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
/**
 * Unit tests for the state/object conversions in {@link InfluxDBStateConvertUtils}.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class InfluxDBStateConvertUtilsTest {

    @Test
    public void convertDecimalState() {
        // A decimal state is stored as a plain floating point number
        DecimalType state = new DecimalType(new BigDecimal("1.12"));
        Object converted = InfluxDBStateConvertUtils.stateToObject(state);
        assertThat((Double) converted, closeTo(1.12, 0.01));
    }

    @Test
    public void convertOnOffState() {
        // OPEN and ON states both map to the numeric value 1
        assertThat(InfluxDBStateConvertUtils.stateToObject(OpenClosedType.OPEN), equalTo(1));
        assertThat(InfluxDBStateConvertUtils.stateToObject(OnOffType.ON), equalTo(1));
    }

    @Test
    public void convertDateTimeState() {
        // Date/time states are persisted as epoch milliseconds
        ZonedDateTime reference = ZonedDateTime.now();
        long referenceMillis = reference.toInstant().toEpochMilli();
        assertThat(InfluxDBStateConvertUtils.stateToObject(new DateTimeType(reference)), equalTo(referenceMillis));
    }

    @Test
    public void convertDecimalToState() {
        BigDecimal storedValue = new BigDecimal("1.12");
        NumberItem numberItem = new NumberItem("name");
        assertThat(InfluxDBStateConvertUtils.objectToState(storedValue, numberItem),
                equalTo(new DecimalType(storedValue)));
    }

    @Test
    public void convertOnOffToState() {
        // Booleans and the numeric value 1 both translate back to an "active" state,
        // whose concrete type depends on the target item
        boolean storedBoolean = true;
        int storedInt = 1;
        SwitchItem switchItem = new SwitchItem("name");
        ContactItem contactItem = new ContactItem("name");
        assertThat(InfluxDBStateConvertUtils.objectToState(storedBoolean, switchItem), equalTo(OnOffType.ON));
        assertThat(InfluxDBStateConvertUtils.objectToState(storedInt, switchItem), equalTo(OnOffType.ON));
        assertThat(InfluxDBStateConvertUtils.objectToState(storedBoolean, contactItem), equalTo(OpenClosedType.OPEN));
        assertThat(InfluxDBStateConvertUtils.objectToState(storedInt, contactItem), equalTo(OpenClosedType.OPEN));
    }

    @Test
    public void convertDateTimeToState() {
        // Epoch milliseconds are converted back using the system default time zone
        long storedMillis = System.currentTimeMillis();
        DateTimeItem dateTimeItem = new DateTimeItem("name");
        DateTimeType expected = new DateTimeType(
                ZonedDateTime.ofInstant(Instant.ofEpochMilli(storedMillis), ZoneId.systemDefault()));
        assertThat(InfluxDBStateConvertUtils.objectToState(storedMillis, dateTimeItem), equalTo(expected));
    }
}

View File

@@ -0,0 +1,172 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import org.eclipse.jdt.annotation.DefaultLocation;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.persistence.influxdb.internal.influx1.Influx1FilterCriteriaQueryCreatorImpl;
import org.openhab.persistence.influxdb.internal.influx2.Influx2FilterCriteriaQueryCreatorImpl;
/**
 * Verifies that {@link FilterCriteria} instances are rendered into the expected
 * InfluxQL (InfluxDB 1) and Flux (InfluxDB 2) query strings.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault({ DefaultLocation.RETURN_TYPE, DefaultLocation.PARAMETER })
public class InfluxFilterCriteriaQueryCreatorImplTest {
    private static final String RETENTION_POLICY = "origin";
    public static final String ITEM_NAME = "sampleItem";

    // Mirrors the timestamp format the V2 (Flux) query creator emits: UTC with nanosecond precision
    private static final DateTimeFormatter INFLUX2_DATE_FORMATTER = DateTimeFormatter
            .ofPattern("yyyy-MM-dd'T'HH:mm:ss.nnnnnnnnn'Z'").withZone(ZoneId.of("UTC"));

    private Influx1FilterCriteriaQueryCreatorImpl instanceV1;
    private Influx2FilterCriteriaQueryCreatorImpl instanceV2;

    @BeforeEach
    public void before() {
        instanceV1 = new Influx1FilterCriteriaQueryCreatorImpl();
        instanceV2 = new Influx2FilterCriteriaQueryCreatorImpl();
    }

    @AfterEach
    public void after() {
        instanceV1 = null;
        instanceV2 = null;
    }

    @Test
    public void testSimpleItemQueryWithoutParams() {
        FilterCriteria criteria = createBaseCriteria();

        String queryV1 = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV1, equalTo("SELECT value FROM origin.sampleItem;"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2, equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)\n\t"
                + "|> filter(fn: (r) => r[\"_measurement\"] == \"sampleItem\")"));
    }

    @Test
    public void testEscapeSimpleItem() {
        // Item names containing a dot must be quoted in the V1 query
        FilterCriteria criteria = createBaseCriteria("sample.Item");

        String queryV1 = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV1, equalTo("SELECT value FROM origin.\"sample.Item\";"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2, equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)\n\t"
                + "|> filter(fn: (r) => r[\"_measurement\"] == \"sample.Item\")"));
    }

    @Test
    public void testSimpleUnboundedItemWithoutParams() {
        // Without an item name the query must address all measurements
        FilterCriteria criteria = new FilterCriteria();
        criteria.setOrdering(null);

        String queryV1 = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV1, equalTo("SELECT value FROM origin./.*/;"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2, equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)"));
    }

    @Test
    public void testRangeCriteria() {
        // Begin/end dates translate into a WHERE time clause (V1) / range(start, stop) (V2)
        FilterCriteria criteria = createBaseCriteria();
        ZonedDateTime now = ZonedDateTime.now();
        ZonedDateTime tomorrow = now.plus(1, ChronoUnit.DAYS);
        criteria.setBeginDate(now);
        criteria.setEndDate(tomorrow);

        String queryV1 = instanceV1.createQuery(criteria, RETENTION_POLICY);
        String expectedQueryV1 = String.format(
                "SELECT value FROM origin.sampleItem WHERE time >= '%s' AND time <= '%s';", now.toInstant(),
                tomorrow.toInstant());
        assertThat(queryV1, equalTo(expectedQueryV1));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        String expectedQueryV2 = String.format(
                "from(bucket:\"origin\")\n\t" + "|> range(start:%s, stop:%s)\n\t"
                        + "|> filter(fn: (r) => r[\"_measurement\"] == \"sampleItem\")",
                INFLUX2_DATE_FORMATTER.format(now.toInstant()), INFLUX2_DATE_FORMATTER.format(tomorrow.toInstant()));
        assertThat(queryV2, equalTo(expectedQueryV2));
    }

    @Test
    public void testValueOperator() {
        FilterCriteria criteria = createBaseCriteria();
        criteria.setOperator(FilterCriteria.Operator.LTE);
        criteria.setState(new PercentType(90));

        String query = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(query, equalTo("SELECT value FROM origin.sampleItem WHERE value <= 90;"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2,
                equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)\n\t"
                        + "|> filter(fn: (r) => r[\"_measurement\"] == \"sampleItem\")\n\t"
                        + "|> filter(fn: (r) => (r[\"_field\"] == \"value\" and r[\"_value\"] <= 90))"));
    }

    @Test
    public void testPagination() {
        // Page 2 with page size 10 translates into LIMIT 10 OFFSET 20 / limit(n:10, offset:20)
        FilterCriteria criteria = createBaseCriteria();
        criteria.setPageNumber(2);
        criteria.setPageSize(10);

        String query = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(query, equalTo("SELECT value FROM origin.sampleItem LIMIT 10 OFFSET 20;"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2, equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)\n\t"
                + "|> filter(fn: (r) => r[\"_measurement\"] == \"sampleItem\")\n\t" + "|> limit(n:10, offset:20)"));
    }

    @Test
    public void testOrdering() {
        FilterCriteria criteria = createBaseCriteria();
        criteria.setOrdering(FilterCriteria.Ordering.ASCENDING);

        String query = instanceV1.createQuery(criteria, RETENTION_POLICY);
        assertThat(query, equalTo("SELECT value FROM origin.sampleItem ORDER BY time ASC;"));

        String queryV2 = instanceV2.createQuery(criteria, RETENTION_POLICY);
        assertThat(queryV2,
                equalTo("from(bucket:\"origin\")\n\t" + "|> range(start:-100y)\n\t"
                        + "|> filter(fn: (r) => r[\"_measurement\"] == \"sampleItem\")\n\t"
                        + "|> sort(desc:false, columns:[\"_time\"])"));
    }

    // Criteria targeting the default sample item with no ordering
    private FilterCriteria createBaseCriteria() {
        return createBaseCriteria(ITEM_NAME);
    }

    private FilterCriteria createBaseCriteria(String sampleItem) {
        FilterCriteria criteria = new FilterCriteria();
        criteria.setItemName(sampleItem);
        criteria.setOrdering(null);
        return criteria;
    }
}

View File

@@ -0,0 +1,30 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.types.DecimalType;
/**
 * Factory methods for items used as fixtures in the persistence tests.
 *
 * @author Joan Pujol Espinar - Initial contribution
 */
@NonNullByDefault
public class ItemTestHelper {

    /** Creates a {@link NumberItem} preloaded with the given decimal state. */
    public static NumberItem createNumberItem(String name, int value) {
        NumberItem item = new NumberItem(name);
        item.setState(new DecimalType(value));
        return item;
    }
}

View File

@@ -0,0 +1,135 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.influxdb.internal;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.mockito.Mockito.when;
import java.math.BigInteger;
import java.util.Map;
import org.eclipse.jdt.annotation.DefaultLocation;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.openhab.core.items.Metadata;
import org.openhab.core.items.MetadataKey;
import org.openhab.core.items.MetadataRegistry;
import org.openhab.core.library.items.NumberItem;
import org.openhab.persistence.influxdb.InfluxDBPersistenceService;
/**
* @author Joan Pujol Espinar - Initial contribution
*/
@ExtendWith(MockitoExtension.class)
@SuppressWarnings("null") // In case of any NPE it will cause test fail that it's the expected result
@NonNullByDefault(value = { DefaultLocation.PARAMETER, DefaultLocation.RETURN_TYPE })
public class ItemToStorePointCreatorTest {
private @Mock InfluxDBConfiguration influxDBConfiguration;
private @Mock MetadataRegistry metadataRegistry;
private ItemToStorePointCreator instance;
@BeforeEach
public void before() {
    // Default configuration: all optional tags and underscore replacement disabled,
    // so each test enables only the single feature it verifies
    when(influxDBConfiguration.isAddCategoryTag()).thenReturn(false);
    when(influxDBConfiguration.isAddLabelTag()).thenReturn(false);
    when(influxDBConfiguration.isAddTypeTag()).thenReturn(false);
    when(influxDBConfiguration.isReplaceUnderscore()).thenReturn(false);
    instance = new ItemToStorePointCreator(influxDBConfiguration, metadataRegistry);
}
@AfterEach
public void after() {
    // Drop references so each test starts from a clean fixture
    instance = null;
    influxDBConfiguration = null;
    metadataRegistry = null;
}
@Test
public void convertBasicItem() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);
    InfluxPoint point = instance.convert(item, null);

    // The measurement is named after the item, and the item name is always stored as a tag
    assertThat(point.getMeasurementName(), equalTo(item.getName()));
    assertThat("Must Store item name", point.getTags(), hasEntry("item", item.getName()));
    assertThat(point.getValue(), equalTo(new BigInteger("5")));
}
@Test
public void shouldUseAliasAsMeasurementNameIfProvided() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);
    // An explicit alias overrides the item name as measurement name
    InfluxPoint point = instance.convert(item, "aliasName");
    assertThat(point.getMeasurementName(), is("aliasName"));
}
@Test
public void shouldStoreCategoryTagIfProvidedAndConfigured() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);
    item.setCategory("categoryValue");

    // Category tag present when the option is enabled ...
    when(influxDBConfiguration.isAddCategoryTag()).thenReturn(true);
    InfluxPoint point = instance.convert(item, null);
    assertThat(point.getTags(), hasEntry(InfluxDBConstants.TAG_CATEGORY_NAME, "categoryValue"));

    // ... and absent when it is disabled
    when(influxDBConfiguration.isAddCategoryTag()).thenReturn(false);
    point = instance.convert(item, null);
    assertThat(point.getTags(), not(hasKey(InfluxDBConstants.TAG_CATEGORY_NAME)));
}
@Test
public void shouldStoreTypeTagIfProvidedAndConfigured() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);

    // Type tag present when the option is enabled ...
    when(influxDBConfiguration.isAddTypeTag()).thenReturn(true);
    InfluxPoint point = instance.convert(item, null);
    assertThat(point.getTags(), hasEntry(InfluxDBConstants.TAG_TYPE_NAME, "Number"));

    // ... and absent when it is disabled
    when(influxDBConfiguration.isAddTypeTag()).thenReturn(false);
    point = instance.convert(item, null);
    assertThat(point.getTags(), not(hasKey(InfluxDBConstants.TAG_TYPE_NAME)));
}
@Test
public void shouldStoreTypeLabelIfProvidedAndConfigured() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);
    item.setLabel("ItemLabel");

    // Label tag present when the option is enabled ...
    when(influxDBConfiguration.isAddLabelTag()).thenReturn(true);
    InfluxPoint point = instance.convert(item, null);
    assertThat(point.getTags(), hasEntry(InfluxDBConstants.TAG_LABEL_NAME, "ItemLabel"));

    // ... and absent when it is disabled
    when(influxDBConfiguration.isAddLabelTag()).thenReturn(false);
    point = instance.convert(item, null);
    assertThat(point.getTags(), not(hasKey(InfluxDBConstants.TAG_LABEL_NAME)));
}
@Test
public void shouldStoreMetadataAsTagsIfProvided() {
    NumberItem item = ItemTestHelper.createNumberItem("myitem", 5);
    MetadataKey metadataKey = new MetadataKey(InfluxDBPersistenceService.SERVICE_NAME, item.getName());

    // Metadata registered under the service's namespace is copied verbatim into the point tags
    when(metadataRegistry.get(metadataKey))
            .thenReturn(new Metadata(metadataKey, "", Map.of("key1", "val1", "key2", "val2")));

    InfluxPoint point = instance.convert(item, null);
    assertThat(point.getTags(), hasEntry("key1", "val1"));
    assertThat(point.getTags(), hasEntry("key2", "val2"));
}
}