Codebase as of c53e4aed26 as an initial commit for the shrunk repo

Signed-off-by: Kai Kreuzer <kai@openhab.org>
This commit is contained in:
Kai Kreuzer
2010-02-20 19:23:32 +01:00
committed by Kai Kreuzer
commit bbf1a7fd29
302 changed files with 29726 additions and 0 deletions

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="test" value="true"/>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@@ -0,0 +1,2 @@
/build/
/drivers/

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.openhab.persistence.jdbc</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

View File

@@ -0,0 +1,13 @@
This content is produced and maintained by the openHAB project.
* Project home: https://www.openhab.org
== Declared Project Licenses
This program and the accompanying materials are made available under the terms
of the Eclipse Public License 2.0 which is available at
https://www.eclipse.org/legal/epl-2.0/.
== Source Code
https://github.com/openhab/openhab-addons

View File

@@ -0,0 +1,174 @@
# JDBC Persistence
This service writes and reads item states to and from a number of relational database systems that support [Java Database Connectivity (JDBC)](https://en.wikipedia.org/wiki/Java_Database_Connectivity).
This service allows you to persist state updates using one of several different underlying database services.
It is designed for maximum scalability, so that it can store very large amounts of data without losing speed over the years.
The generic design makes it relatively easy for developers to integrate other databases that have JDBC drivers.
The following databases are currently supported and tested:
| Database | Tested Driver / Version |
| -------------------------------------------- | ------------------------------------------------------------ |
| [Apache Derby](https://db.apache.org/derby/) | [derby-10.12.1.1.jar](https://mvnrepository.com/artifact/org.apache.derby/derby) |
| [H2](https://www.h2database.com/) | [h2-1.4.191.jar](https://mvnrepository.com/artifact/com.h2database/h2) |
| [HSQLDB](http://hsqldb.org/) | [hsqldb-2.3.3.jar](https://mvnrepository.com/artifact/org.hsqldb/hsqldb) |
| [MariaDB](https://mariadb.org/) | [mariadb-java-client-1.4.6.jar](https://mvnrepository.com/artifact/org.mariadb.jdbc/mariadb-java-client) |
| [MySQL](https://www.mysql.com/) | [mysql-connector-java-5.1.39.jar](https://mvnrepository.com/artifact/mysql/mysql-connector-java) |
| [PostgreSQL](https://www.postgresql.org/) | [postgresql-9.4.1209.jre7.jar](https://mvnrepository.com/artifact/org.postgresql/postgresql) |
| [SQLite](https://www.sqlite.org/) | [sqlite-jdbc-3.16.1.jar](https://mvnrepository.com/artifact/org.xerial/sqlite-jdbc) |
## Table of Contents
<!-- MarkdownTOC -->
- [Configuration](#configuration)
- [Minimal Configuration](#minimal-configuration)
- [Migration from MySQL to JDBC Persistence Services](#migration-from-mysql-to-jdbc-persistence-services)
- [Technical Notes](#technical-notes)
- [Database Table Schema](#database-table-schema)
- [Number Precision](#number-precision)
- [Rounding results](#rounding-results)
- [For Developers](#for-developers)
- [Performance Tests](#performance-tests)
<!-- /MarkdownTOC -->
## Configuration
This service can be configured in the file `services/jdbc.cfg`.
| Property | Default | Required | Description |
| ------------------------- | ------------------------------------------------------------ | :-------: | ------------------------------------------------------------ |
| url | | Yes | JDBC URL to establish a connection to your database. Examples:<br/><br/>`jdbc:derby:./testDerby;create=true`<br/>`jdbc:h2:./testH2`<br/>`jdbc:hsqldb:./testHsqlDb`<br/>`jdbc:mariadb://192.168.0.1:3306/testMariadb`<br/>`jdbc:mysql://192.168.0.1:3306/testMysql?serverTimezone=UTC`<br/>`jdbc:postgresql://192.168.0.1:5432/testPostgresql`<br/>`jdbc:sqlite:./testSqlite.db`.<br/><br/>If no database is available it will be created; for example the url `jdbc:h2:./testH2` creates a new H2 database in openHAB folder. Example to create your own MySQL database directly:<br/><br/>`CREATE DATABASE 'yourDB' CHARACTER SET utf8 COLLATE utf8_general_ci;` |
| user | | if needed | database user name |
| password | | if needed | database user password |
| errReconnectThreshold | 0 | No | when the service is deactivated (0 means ignore) |
| sqltype.CALL | `VARCHAR(200)` | No | All `sqlType` options allow you to change the SQL data type used to store values for different openHAB item states. See the following links for further information: [mybatis](https://mybatis.github.io/mybatis-3/apidocs/reference/org/apache/ibatis/type/JdbcType.html) [H2](http://www.h2database.com/html/datatypes.html) [PostgresSQL](http://www.postgresql.org/docs/9.3/static/datatype.html) |
| sqltype.COLOR | `VARCHAR(70)` | No | see above |
| sqltype.CONTACT | `VARCHAR(6)` | No | see above |
| sqltype.DATETIME | `DATETIME` | No | see above |
| sqltype.DIMMER | `TINYINT` | No | see above |
| sqltype.LOCATION | `VARCHAR(30)` | No | see above |
| sqltype.NUMBER | `DOUBLE` | No | see above |
| sqltype.ROLLERSHUTTER | `TINYINT` | No | see above |
| sqltype.STRING | `VARCHAR(65500)` | No | see above |
| sqltype.SWITCH | `VARCHAR(6)` | No | see above |
| sqltype.tablePrimaryKey | `TIMESTAMP` | No | type of `time` column for newly created item tables |
| sqltype.tablePrimaryValue | `NOW()` | No | value of `time` column for newly inserted rows |
| numberDecimalcount | 3 | No | for Itemtype "Number" default decimal digit count |
| tableNamePrefix | `item` | No | table name prefix. For Migration from MySQL Persistence, set to `Item`. |
| tableUseRealItemNames | `false` | No | table name prefix generation. When set to `true`, real item names are used for table names and `tableNamePrefix` is ignored. When set to `false`, the `tableNamePrefix` is used to generate table names with sequential numbers. |
| tableIdDigitCount | 4 | No | when `tableUseRealItemNames` is `false` and thus table names are generated sequentially, this controls how many zero-padded digits are used in the table name. With the default of 4, the first table name will end with `0001`. For migration from the MySQL persistence service, set this to 0. |
| rebuildTableNames | false | No | rename existing tables using `tableUseRealItemNames` and `tableIdDigitCount`. USE WITH CARE! Deactivate after Renaming is done! |
| jdbc.maximumPoolSize | configured per database in package `org.openhab.persistence.jdbc.db.*` | No | Some embedded databases can handle only one connection. See [this link](https://github.com/brettwooldridge/HikariCP/issues/256) for more information |
| jdbc.minimumIdle | see above | No | see above |
| enableLogTime | `false` | No | timekeeping |
All item- and event-related configuration is done in the file `persistence/jdbc.persist`.
To configure this service as the default persistence service for openHAB 2, add or change the line
```
org.openhab.core.persistence:default=jdbc
```
in the file `services/runtime.cfg`.
### Minimal Configuration
services/jdbc.cfg
```
url=jdbc:postgresql://192.168.0.1:5432/testPostgresql
```
### Migration from MySQL to JDBC Persistence Services
The JDBC Persistence service can act as a replacement for the MySQL Persistence service.
Here is an example of a configuration for a MySQL database named `testMysql` with user `test` and password `test`:
services/jdbc.cfg
```
url=jdbc:mysql://192.168.0.1:3306/testMysql
user=test
password=test
tableNamePrefix=Item
tableUseRealItemNames=false
tableIdDigitCount=0
```
Remember to install and uninstall the services you want, and rename `persistence/mysql.persist` to `persistence/jdbc.persist`.
## Technical Notes
### Database Table Schema
The table name schema can be reconfigured after creation, if needed.
The service will create a mapping table to link each item to a table, and a separate table is generated for each item.
The item data tables include time and data values.
The SQL data type used depends on the openHAB item type, and allows the item state to be recovered back into openHAB in the same way it was stored.
With this *per-item* layout, the scalability and easy maintenance of the database is ensured, even if large amounts of data must be managed.
To rename existing tables, use the parameters `tableUseRealItemNames` and `tableIdDigitCount` in the configuration.
### Number Precision
Default openHAB number items are persisted with SQL datatype `double`.
Internally openHAB uses `BigDecimal`.
If better numerical precision is needed, for example set `sqltype.NUMBER = DECIMAL(max digits, max decimals)`, then on the Java side, the service works with `BigDecimal` without type conversion.
If a value has more decimal places than `max decimals` provides, the persisted value is rounded mathematically correctly.
The SQL types `DECIMAL` and `NUMERIC` are precise, but working with `DOUBLE` is faster.
### Rounding results
The results of database queries of number items are rounded to three decimal places by default.
The number of decimal places can be changed with the `numberDecimalcount` parameter.
Especially if sql types `DECIMAL` or `NUMERIC` are used for `sqltype.NUMBER`, rounding can be disabled by setting `numberDecimalcount=-1`.
### For Developers
* Clearly separated source files for the database-specific part of openHAB logic.
* Code duplication by similar services is prevented.
* Integrating a new SQL and JDBC enabled database is fairly simple.
### Performance Tests
Not necessarily representative of the performance you may experience.
| DATABASE | FIRST RUN | AVERAGE | FASTEST | SIZE AFTER | COMMENT |
| ---------- | --------: | ------: | ------: | ---------: | -------------- |
| Derby | 7.829 | 6.892 | 5.381 | 5.36 MB | local embedded |
| H2 | 1.797 | 2.080 | 1.580 | 0.96 MB | local embedded |
| hsqldb | 3.474 | 2.104 | 1.310 | 1.23 MB | local embedded |
| mysql | 11.873 | 11.524 | 10.971 | - | ext. Server VM |
| postgresql | 8.147 | 7.072 | 6.895 | - | ext. Server VM |
| sqlite | 2.406 | 1.249 | 1.137 | 0.28 MB | local embedded |
* Each test ran about 20 times, once every 30 seconds.
* openHAB 1.x had already been started for about a minute.
* The timing data, in seconds, was taken from the console output.
Used a script like this:
```
var count = 0;
rule "DB STRESS TEST"
when
Time cron "30 * * * * ?"
then
if( count == 24) count = 0
count = count+1
if( count > 3 && count < 23){
for( var i=500; i>1; i=i-1){
postUpdate( NUMBERITEM, i)
SWITCHITEM.previousState().state
postUpdate( DIMMERITEM, OFF)
NUMBERITEM.changedSince( now().minusMinutes(1))
postUpdate( DIMMERITEM, ON)
}
}
end
```

View File

@@ -0,0 +1,93 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.openhab.addons.bundles</groupId>
<artifactId>org.openhab.addons.reactor.bundles</artifactId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<artifactId>org.openhab.persistence.jdbc</artifactId>
<name>openHAB Add-ons :: Bundles :: Persistence Service :: JDBC</name>
<properties>
<bnd.importpackage>!org.osgi.service.jdbc.*,!sun.security.*,!org.apache.lucene.*,!org.apache.logging.log4j,!waffle.windows.auth.*,!org.hibernate.*,!org.jboss.*,!org.codehaus.groovy.*,!com.codahale.metrics.*,!com.google.protobuf.*,!com.ibm.icu.*,!com.ibm.jvm.*,!com.mchange.*,!com.sun.*,!com.vividsolutions.*,!io.prometheus.*,com.mysql.jdbc;resolution:=optional,org.apache.derby.*;resolution:=optional,org.h2;resolution:=optional,org.h2.jdbcx;resolution:=optional,org.hsqldb;resolution:=optional,org.hsqldb.jdbc;resolution:=optional,org.mariadb.jdbc;resolution:=optional,org.postgresql;resolution:=optional,org.sqlite;resolution:=optional,org.sqlite.jdbc4;resolution:=optional,javassist*;resolution:=optional</bnd.importpackage>
<dep.noembedding>derby,h2,hsqldb,mariadb-java-client,mysql-connector-java,postgresql,sqlite-jdbc</dep.noembedding>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<hikari.version>2.4.7</hikari.version>
<dbutils.version>1.6</dbutils.version>
<yank.version>3.2.0</yank.version>
<!-- JDBC database driver versions -->
<derby.version>10.12.1.1</derby.version>
<h2.version>1.4.191</h2.version>
<hsqldb.version>2.3.3</hsqldb.version>
<mariadb.version>1.3.5</mariadb.version>
<mysql.version>8.0.13</mysql.version>
<postgresql.version>9.4.1212</postgresql.version>
<sqlite.version>3.16.1</sqlite.version>
</properties>
<dependencies>
<dependency>
<groupId>commons-dbutils</groupId>
<artifactId>commons-dbutils</artifactId>
<version>${dbutils.version}</version>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>${hikari.version}</version>
</dependency>
<dependency>
<groupId>org.knowm</groupId>
<artifactId>yank</artifactId>
<version>${yank.version}</version>
</dependency>
<!-- DB dependencies -->
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<version>${derby.version}</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>${hsqldb.version}</version>
</dependency>
<dependency>
<groupId>org.mariadb.jdbc</groupId>
<artifactId>mariadb-java-client</artifactId>
<version>${mariadb.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.xerial</groupId>
<artifactId>sqlite-jdbc</artifactId>
<version>${sqlite.version}</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<features name="org.openhab.persistence.jdbc-${project.version}" xmlns="http://karaf.apache.org/xmlns/features/v1.4.0">
<repository>mvn:org.openhab.core.features.karaf/org.openhab.core.features.karaf.openhab-core/${ohc.version}/xml/features</repository>
<!-- JDBC Persistence for: Apache Derby, H2, HSQLDB, MariaDB, MySQL, PostgreSQL, SQLite -->
<feature name="openhab-persistence-jdbc-derby" description="JDBC Persistence Apache Derby" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.apache.derby/derbyclient/${derby.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-h2" description="JDBC Persistence H2" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:com.h2database/h2/${h2.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-hsqldb" description="JDBC Persistence HSQLDB" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.hsqldb/hsqldb/${hsqldb.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-mariadb" description="JDBC Persistence MariaDB" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.mariadb.jdbc/mariadb-java-client/${mariadb.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-mysql" description="JDBC Persistence MySQL" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:mysql/mysql-connector-java/${mysql.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-postgresql" description="JDBC Persistence PostgreSQL" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.postgresql/postgresql/${postgresql.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
<feature name="openhab-persistence-jdbc-sqlite" description="JDBC Persistence SQLite" version="${project.version}">
<configfile finalname="${openhab.conf}/services/jdbc.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/jdbc</configfile>
<feature prerequisite="false" dependency="false">openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.xerial/sqlite-jdbc/${sqlite.version}</bundle>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.jdbc/${project.version}</bundle>
</feature>
</features>

View File

@@ -0,0 +1,563 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.knowm.yank.Yank;
import org.openhab.core.items.GroupItem;
import org.openhab.core.items.Item;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.HSBType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.library.types.StringType;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.model.JdbcHistoricItem;
import org.openhab.persistence.jdbc.utils.DbMetaData;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default Database Configuration class.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcBaseDAO {
// Shared logger instance for this DAO.
private final Logger logger = LoggerFactory.getLogger(JdbcBaseDAO.class);
// Datasource/connection-pool properties handed to the connection layer (see initDbProps()).
public Properties databaseProps = new Properties();
// Optional suffix appended to the JDBC URL; presumably set by database-specific subclasses — TODO confirm.
protected String urlSuffix = "";
// Maps item-type keys (e.g. "NUMBERITEM") and the tablePrimaryKey/tablePrimaryValue settings
// to SQL fragments; populated by initSqlTypes().
public Map<String, String> sqlTypes = new HashMap<>();
// Database meta data, populated after the first successful connection (initAfterFirstDbConnection()).
protected DbMetaData dbMeta;
// SQL statement templates; #placeholder# tokens are substituted before execution.
protected String sqlPingDB;
protected String sqlGetDB;
protected String sqlIfTableExists;
protected String sqlCreateNewEntryInItemsTable;
protected String sqlCreateItemsTableIfNot;
protected String sqlDeleteItemsEntry;
protected String sqlGetItemIDTableNames;
protected String sqlGetItemTables;
protected String sqlCreateItemTable;
protected String sqlInsertItemValue;
/********
* INIT *
********/
/**
 * Initializes default type mappings, datasource properties and SQL statement templates.
 * Subclasses get these defaults first and can then apply dialect-specific overrides.
 */
public JdbcBaseDAO() {
initSqlTypes();
initDbProps();
initSqlQueries();
}
/**
* ## Get high precision by fractal seconds, examples ##
*
* mysql > 5.5 + mariadb > 5.2:
* DROP TABLE FractionalSeconds;
* CREATE TABLE FractionalSeconds (time TIMESTAMP(3), value TIMESTAMP(3));
* INSERT INTO FractionalSeconds (time, value) VALUES( NOW(3), '1999-01-09 20:11:11.126' );
* SELECT time FROM FractionalSeconds ORDER BY time DESC LIMIT 1;
*
* mysql <= 5.5 + mariadb <= 5.2: !!! NO high precision and fractal seconds !!!
* DROP TABLE FractionalSeconds;
* CREATE TABLE FractionalSeconds (time TIMESTAMP, value TIMESTAMP);
* INSERT INTO FractionalSeconds (time, value) VALUES( NOW(), '1999-01-09 20:11:11.126' );
* SELECT time FROM FractionalSeconds ORDER BY time DESC LIMIT 1;
*
* derby:
* DROP TABLE FractionalSeconds;
* CREATE TABLE FractionalSeconds (time TIMESTAMP, value TIMESTAMP);
* INSERT INTO FractionalSeconds (time, value) VALUES( CURRENT_TIMESTAMP, '1999-01-09 20:11:11.126' );
* SELECT time, value FROM FractionalSeconds;
*
* H2 + postgreSQL + hsqldb:
* DROP TABLE FractionalSeconds;
* CREATE TABLE FractionalSeconds (time TIMESTAMP, value TIMESTAMP);
* INSERT INTO FractionalSeconds (time, value) VALUES( NOW(), '1999-01-09 20:11:11.126' );
* SELECT time, value FROM FractionalSeconds;
*
* Sqlite:
* DROP TABLE FractionalSeconds;
* CREATE TABLE FractionalSeconds (time TIMESTAMP, value TIMESTAMP);
* INSERT INTO FractionalSeconds (time, value) VALUES( strftime('%Y-%m-%d %H:%M:%f' , 'now' , 'localtime'),
* '1999-01-09 20:11:11.124' );
* SELECT time FROM FractionalSeconds ORDER BY time DESC LIMIT 1;
*
*/
/**
 * Defines the default (MySQL-flavoured) SQL statement templates used by the DAO methods.
 * Placeholders of the form #name# are substituted via StringUtilsExt.replaceArrayMerge()
 * before execution; database-specific subclasses override individual templates where the
 * dialect differs.
 */
private void initSqlQueries() {
    logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
    sqlPingDB = "SELECT 1";
    sqlGetDB = "SELECT DATABASE()";
    sqlIfTableExists = "SHOW TABLES LIKE '#searchTable#'";
    sqlCreateNewEntryInItemsTable = "INSERT INTO #itemsManageTable# (ItemName) VALUES ('#itemname#')";
    sqlCreateItemsTableIfNot = "CREATE TABLE IF NOT EXISTS #itemsManageTable# (ItemId INT NOT NULL AUTO_INCREMENT,#colname# #coltype# NOT NULL,PRIMARY KEY (ItemId))";
    // FIX: ItemName is compared against a string literal, so the substituted value must be
    // quoted — consistent with sqlCreateNewEntryInItemsTable above. Without the quotes the
    // generated DELETE statement is invalid SQL for any item name.
    sqlDeleteItemsEntry = "DELETE FROM items WHERE ItemName='#itemname#'";
    sqlGetItemIDTableNames = "SELECT itemid, itemname FROM #itemsManageTable#";
    sqlGetItemTables = "SELECT table_name FROM information_schema.tables WHERE table_type='BASE TABLE' AND table_schema='#jdbcUriDatabaseName#' AND NOT table_name='#itemsManageTable#'";
    sqlCreateItemTable = "CREATE TABLE IF NOT EXISTS #tableName# (time #tablePrimaryKey# NOT NULL, value #dbType#, PRIMARY KEY(time))";
    sqlInsertItemValue = "INSERT INTO #tableName# (TIME, VALUE) VALUES( #tablePrimaryValue#, ? ) ON DUPLICATE KEY UPDATE VALUE= ?";
}
/**
* INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
*/
/**
 * Populates the lookup table that maps item-type keys to the SQL column types used when
 * creating item tables, plus the tablePrimaryKey/tablePrimaryValue settings for the time
 * column. Entries can be overridden via the sqltype.* configuration properties.
 */
private void initSqlTypes() {
    logger.debug("JDBC::initSqlTypes: Initialize the type array");
    final String[][] defaults = {
            { "CALLITEM", "VARCHAR(200)" },
            { "COLORITEM", "VARCHAR(70)" },
            { "CONTACTITEM", "VARCHAR(6)" },
            { "DATETIMEITEM", "TIMESTAMP" },
            { "DIMMERITEM", "TINYINT" },
            { "LOCATIONITEM", "VARCHAR(30)" },
            { "NUMBERITEM", "DOUBLE" },
            { "ROLLERSHUTTERITEM", "TINYINT" },
            { "STRINGITEM", "VARCHAR(65500)" }, // jdbc max 21845
            { "SWITCHITEM", "VARCHAR(6)" },
            { "tablePrimaryKey", "TIMESTAMP" },
            { "tablePrimaryValue", "NOW()" },
    };
    for (String[] mapping : defaults) {
        sqlTypes.put(mapping[0], mapping[1]);
    }
}
/**
* INFO: https://github.com/brettwooldridge/HikariCP
*
* driverClassName (used with jdbcUrl):
* Derby: org.apache.derby.jdbc.EmbeddedDriver
* H2: org.h2.Driver
* HSQLDB: org.hsqldb.jdbcDriver
* Jaybird: org.firebirdsql.jdbc.FBDriver
* MariaDB: org.mariadb.jdbc.Driver
* MySQL: com.mysql.jdbc.Driver
* MaxDB: com.sap.dbtech.jdbc.DriverSapDB
* PostgreSQL: org.postgresql.Driver
* SyBase: com.sybase.jdbc3.jdbc.SybDriver
* SqLite: org.sqlite.JDBC
*
* dataSourceClassName (for alternative Configuration):
* Derby: org.apache.derby.jdbc.ClientDataSource
* H2: org.h2.jdbcx.JdbcDataSource
* HSQLDB: org.hsqldb.jdbc.JDBCDataSource
* Jaybird: org.firebirdsql.pool.FBSimpleDataSource
* MariaDB, MySQL: org.mariadb.jdbc.MySQLDataSource
* MaxDB: com.sap.dbtech.jdbc.DriverSapDB
* PostgreSQL: org.postgresql.ds.PGSimpleDataSource
* SyBase: com.sybase.jdbc4.jdbc.SybDataSource
* SqLite: org.sqlite.SQLiteDataSource
*
* HikariPool - configuration Example:
* allowPoolSuspension.............false
* autoCommit......................true
* catalog.........................
* connectionInitSql...............
* connectionTestQuery.............
* connectionTimeout...............30000
* dataSource......................
* dataSourceClassName.............
* dataSourceJNDI..................
* dataSourceProperties............{password=<masked>}
* driverClassName.................
* healthCheckProperties...........{}
* healthCheckRegistry.............
* idleTimeout.....................600000
* initializationFailFast..........true
* isolateInternalQueries..........false
* jdbc4ConnectionTest.............false
* jdbcUrl.........................jdbc:mysql://192.168.0.1:3306/test
* leakDetectionThreshold..........0
* maxLifetime.....................1800000
* maximumPoolSize.................10
* metricRegistry..................
* metricsTrackerFactory...........
* minimumIdle.....................10
* password........................<masked>
* poolName........................HikariPool-0
* readOnly........................false
* registerMbeans..................false
* scheduledExecutorService........
* threadFactory...................
* transactionIsolation............
* username........................xxxx
* validationTimeout...............5000
*/
/**
 * Placeholder for datasource defaults. The base implementation intentionally sets nothing;
 * database-specific subclasses and user configuration populate {@link #databaseProps}.
 * The commented-out examples below show the most relevant tuning properties.
 */
private void initDbProps() {
// databaseProps.setProperty("dataSource.url", "jdbc:mysql://192.168.0.1:3306/test");
// databaseProps.setProperty("dataSource.user", "test");
// databaseProps.setProperty("dataSource.password", "test");
// Most relevant Performance values
// maximumPoolSize to 20, minimumIdle to 5, and idleTimeout to 2 minutes.
// databaseProps.setProperty("maximumPoolSize", ""+maximumPoolSize);
// databaseProps.setProperty("minimumIdle", ""+minimumIdle);
// databaseProps.setProperty("idleTimeout", ""+idleTimeout);
// databaseProps.setProperty("connectionTimeout",""+connectionTimeout);
// databaseProps.setProperty("idleTimeout", ""+idleTimeout);
// databaseProps.setProperty("maxLifetime", ""+maxLifetime);
// databaseProps.setProperty("validationTimeout",""+validationTimeout);
}
/**
 * Hook executed once after the first successful database connection; collects database
 * meta data so that version-dependent behavior (e.g. SQL types) can be adjusted.
 */
public void initAfterFirstDbConnection() {
logger.debug("JDBC::initAfterFirstDbConnection: Initializing step, after db is connected.");
// Initialize sqlTypes, depending on DB version for example
dbMeta = new DbMetaData();// get DB information
}
/**************
* ITEMS DAOs *
**************/
/**
 * Executes the configured ping statement ("SELECT 1" by default) to verify that the
 * database connection is alive.
 *
 * @return the scalar result of the ping statement
 */
public Integer doPingDB() {
    final Integer pingResult = Yank.queryScalar(sqlPingDB, Integer.class, null);
    return pingResult;
}
/**
 * Queries the database for its current database/schema name.
 *
 * @return the database name as reported by the configured sqlGetDB statement
 */
public String doGetDB() {
    final String databaseName = Yank.queryScalar(sqlGetDB, String.class, null);
    return databaseName;
}
/**
 * Checks whether the items management table exists in the database.
 *
 * @param vo value object carrying the management table name to search for
 * @return true if the table was found, false otherwise
 */
public boolean doIfTableExists(ItemsVO vo) {
    final String searchTable = vo.getItemsManageTable();
    final String existsSql = StringUtilsExt.replaceArrayMerge(sqlIfTableExists,
            new String[] { "#searchTable#" }, new String[] { searchTable });
    logger.debug("JDBC::doIfTableExists sql={}", existsSql);
    final String match = Yank.queryScalar(existsSql, String.class, null);
    return match != null;
}
/**
 * Registers a new item in the items management table.
 *
 * @param vo value object carrying the management table name and the item name
 * @return the generated key of the inserted row
 */
public Long doCreateNewEntryInItemsTable(ItemsVO vo) {
    final String[] tokens = { "#itemsManageTable#", "#itemname#" };
    final String[] values = { vo.getItemsManageTable(), vo.getItemname() };
    final String insertSql = StringUtilsExt.replaceArrayMerge(sqlCreateNewEntryInItemsTable, tokens, values);
    logger.debug("JDBC::doCreateNewEntryInItemsTable sql={}", insertSql);
    return Yank.insert(insertSql, null);
}
/**
 * Creates the items management table if it does not exist yet.
 *
 * @param vo value object carrying the table name, id column name and column type
 * @return the unchanged input value object, for call chaining
 */
public ItemsVO doCreateItemsTableIfNot(ItemsVO vo) {
    final String[] tokens = { "#itemsManageTable#", "#colname#", "#coltype#" };
    final String[] values = { vo.getItemsManageTable(), vo.getColname(), vo.getColtype() };
    final String createSql = StringUtilsExt.replaceArrayMerge(sqlCreateItemsTableIfNot, tokens, values);
    logger.debug("JDBC::doCreateItemsTableIfNot sql={}", createSql);
    Yank.execute(createSql, null);
    return vo;
}
/**
 * Removes the entry for the given item from the items management table.
 *
 * @param vo value object carrying the name of the item to delete
 */
public void doDeleteItemsEntry(ItemsVO vo) {
    final String deleteSql = StringUtilsExt.replaceArrayMerge(sqlDeleteItemsEntry,
            new String[] { "#itemname#" }, new String[] { vo.getItemname() });
    logger.debug("JDBC::doDeleteItemsEntry sql={}", deleteSql);
    Yank.execute(deleteSql, null);
}
/**
 * Reads all item id / item name pairs from the items management table.
 *
 * @param vo value object carrying the management table name
 * @return one ItemsVO per registered item
 */
public List<ItemsVO> doGetItemIDTableNames(ItemsVO vo) {
    final String selectSql = StringUtilsExt.replaceArrayMerge(sqlGetItemIDTableNames,
            new String[] { "#itemsManageTable#" }, new String[] { vo.getItemsManageTable() });
    logger.debug("JDBC::doGetItemIDTableNames sql={}", selectSql);
    return Yank.queryBeanList(selectSql, ItemsVO.class, null);
}
/**
 * Lists all item data tables in the current database schema, excluding the items
 * management table itself.
 *
 * @param vo value object carrying the database name and the management table name
 * @return one ItemsVO per item data table found
 */
public List<ItemsVO> doGetItemTables(ItemsVO vo) {
    final String[] tokens = { "#jdbcUriDatabaseName#", "#itemsManageTable#" };
    final String[] values = { vo.getJdbcUriDatabaseName(), vo.getItemsManageTable() };
    final String selectSql = StringUtilsExt.replaceArrayMerge(sqlGetItemTables, tokens, values);
    logger.debug("JDBC::doGetItemTables sql={}", selectSql);
    return Yank.queryBeanList(selectSql, ItemsVO.class, null);
}
/*************
* ITEM DAOs *
*************/
/**
 * Renames item data tables according to the given list; no-op for an empty list.
 *
 * @param vol value objects describing the old and new table names
 */
public void doUpdateItemTableNames(List<ItemVO> vol) {
    if (vol.isEmpty()) {
        return; // nothing to rename
    }
    final String renameSql = updateItemTableNamesProvider(vol);
    Yank.execute(renameSql, null);
}
/**
 * Creates the data table for a single item (time/value columns).
 *
 * @param vo carries the table name and the SQL type of the value column
 */
public void doCreateItemTable(ItemVO vo) {
    String[] tokens = { "#tableName#", "#dbType#", "#tablePrimaryKey#" };
    String[] replacements = { vo.getTableName(), vo.getDbType(), sqlTypes.get("tablePrimaryKey") };
    String createSql = StringUtilsExt.replaceArrayMerge(sqlCreateItemTable, tokens, replacements);
    logger.debug("JDBC::doCreateItemTable sql={}", createSql);
    Yank.execute(createSql, null);
}
/**
 * Persists the item's current state into its data table.
 *
 * @param item the openHAB item whose state is stored
 * @param vo target table metadata; value/type fields are filled by storeItemValueProvider
 */
public void doStoreItemValue(Item item, ItemVO vo) {
    ItemVO resolved = storeItemValueProvider(item, vo);
    String[] tokens = { "#tableName#", "#tablePrimaryValue#" };
    String[] replacements = { resolved.getTableName(), sqlTypes.get("tablePrimaryValue") };
    String insertSql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue, tokens, replacements);
    // the value is bound twice to match the two '?' placeholders in sqlInsertItemValue
    // (template defined elsewhere in this class — presumably insert + duplicate-key update)
    Object[] params = { resolved.getValue(), resolved.getValue() };
    logger.debug("JDBC::doStoreItemValue sql={} value='{}'", insertSql, resolved.getValue());
    Yank.execute(insertSql, params);
}
/**
 * Runs a history query for one item and converts every result row into a HistoricItem.
 *
 * @param item the item the history belongs to
 * @param filter time-range / ordering / paging criteria
 * @param numberDecimalcount decimal places for rounding numeric values (-1 = no rounding)
 * @param table the item's data table
 * @param name the item type's simple class name, used by the query provider
 * @return the matching history entries, possibly empty
 */
public List<HistoricItem> doGetHistItemFilterQuery(Item item, FilterCriteria filter, int numberDecimalcount,
        String table, String name) {
    String sql = histItemFilterQueryProvider(filter, numberDecimalcount, table, name);
    logger.debug("JDBC::doGetHistItemFilterQuery sql={}", sql);
    List<HistoricItem> items = new ArrayList<>();
    for (Object[] row : Yank.queryObjectArrays(sql, null)) {
        // row[0] = time column, row[1] = value column (column order fixed by the provider)
        items.add(new JdbcHistoricItem(item.getName(), getState(item, row[1]), objectAsDate(row[0])));
    }
    return items;
}
/*************
* Providers *
*************/
// Pattern used to render FilterCriteria begin/end dates as SQL TIME literals in
// histItemFilterQueryProvider. NOTE(review): the pattern carries no zone/offset —
// formatting presumably happens in the temporal's own zone; confirm it matches the
// zone of the stored TIME column values.
static final DateTimeFormatter JDBC_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
/**
 * Builds the SELECT statement for a history query: WHERE clause from the filter's
 * begin/end dates, ORDER BY from its ordering, LIMIT from its paging, and optional
 * ROUND() for number items.
 *
 * Improvements over the previous version: the magic constant 0x7fffffff is spelled
 * Integer.MAX_VALUE, the clause is assembled with a StringBuilder instead of repeated
 * String concatenation, and a stray trailing space after "DESC" (which produced a
 * double space before "LIMIT") is removed.
 *
 * @param filter time-range / ordering / paging criteria
 * @param numberDecimalcount decimal places for ROUND(); -1 disables rounding
 * @param table the item's data table name
 * @param simpleName the item type's simple class name (e.g. "NumberItem")
 * @return the complete SQL query string
 */
private String histItemFilterQueryProvider(FilterCriteria filter, int numberDecimalcount, String table,
        String simpleName) {
    logger.debug(
            "JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}",
            filter.toString(), numberDecimalcount, table, simpleName);
    StringBuilder filterString = new StringBuilder();
    if (filter.getBeginDate() != null) {
        filterString.append(filterString.length() == 0 ? " WHERE" : " AND");
        filterString.append(" TIME>'").append(JDBC_DATE_FORMAT.format(filter.getBeginDate())).append("'");
    }
    if (filter.getEndDate() != null) {
        filterString.append(filterString.length() == 0 ? " WHERE" : " AND");
        filterString.append(" TIME<'").append(JDBC_DATE_FORMAT.format(filter.getEndDate())).append("'");
    }
    filterString.append(filter.getOrdering() == Ordering.ASCENDING ? " ORDER BY time ASC" : " ORDER BY time DESC");
    // Integer.MAX_VALUE is the "no paging" sentinel used by FilterCriteria
    if (filter.getPageSize() != Integer.MAX_VALUE) {
        filterString.append(" LIMIT ").append(filter.getPageNumber() * filter.getPageSize()).append(",")
                .append(filter.getPageSize());
    }
    // SELECT time, ROUND(value,3) FROM number_item_0114 ORDER BY time DESC LIMIT 0,1
    // rounding HALF UP
    String queryString = "NUMBERITEM".equalsIgnoreCase(simpleName) && numberDecimalcount > -1
            ? "SELECT time, ROUND(value," + numberDecimalcount + ") FROM " + table
            : "SELECT time, value FROM " + table;
    if (filterString.length() > 0) {
        queryString += filterString;
    }
    logger.debug("JDBC::query queryString = {}", queryString);
    return queryString;
}
/**
 * Builds one semicolon-separated batch of ALTER TABLE ... RENAME TO ... statements,
 * one per entry in {@code namesList}.
 *
 * Uses a StringBuilder instead of the previous accidentally-quadratic String
 * concatenation inside the loop, and an enhanced for loop instead of indexing.
 *
 * @param namesList old/new table name pairs; an empty list yields an empty string
 * @return the concatenated rename statements
 */
private String updateItemTableNamesProvider(List<ItemVO> namesList) {
    logger.debug("JDBC::updateItemTableNamesProvider namesList.size = {}", namesList.size());
    StringBuilder queryString = new StringBuilder();
    for (ItemVO it : namesList) {
        queryString.append("ALTER TABLE ").append(it.getTableName()).append(" RENAME TO ")
                .append(it.getNewTableName()).append(";");
    }
    logger.debug("JDBC::query queryString = {}", queryString);
    return queryString.toString();
}
/**
 * Fills {@code vo} with the SQL type and a Java value converted from the item's
 * current state, ready for binding into the insert statement.
 *
 * The repeated {@code it.toUpperCase()} calls in the NUMBERITEM branch are hoisted
 * into a single local; all conversions and log messages are otherwise unchanged.
 *
 * @param item the item whose state is converted
 * @param vo the value object to populate (also returned)
 * @return {@code vo} with value type and value set
 */
protected ItemVO storeItemValueProvider(Item item, ItemVO vo) {
    String itemType = getItemType(item);
    logger.debug("JDBC::storeItemValueProvider: item '{}' as Type '{}' in '{}' with state '{}'", item.getName(),
            itemType, vo.getTableName(), item.getState().toString());
    // insertItemValue
    logger.debug("JDBC::storeItemValueProvider: getState: '{}'", item.getState().toString());
    if ("COLORITEM".equals(itemType)) {
        // stored as its string representation (HSB triple)
        vo.setValueTypes(getSqlTypes().get(itemType), java.lang.String.class);
        vo.setValue(item.getState().toString());
    } else if ("NUMBERITEM".equals(itemType)) {
        String it = getSqlTypes().get(itemType);
        String itUpper = it.toUpperCase(); // hoisted: previously recomputed for every check
        if (itUpper.contains("DOUBLE")) {
            vo.setValueTypes(it, java.lang.Double.class);
            Number newVal = ((DecimalType) item.getState());
            logger.debug("JDBC::storeItemValueProvider: newVal.doubleValue: '{}'", newVal.doubleValue());
            vo.setValue(newVal.doubleValue());
        } else if (itUpper.contains("DECIMAL") || itUpper.contains("NUMERIC")) {
            vo.setValueTypes(it, java.math.BigDecimal.class);
            DecimalType newVal = ((DecimalType) item.getState());
            logger.debug("JDBC::storeItemValueProvider: newVal.toBigDecimal: '{}'", newVal.toBigDecimal());
            vo.setValue(newVal.toBigDecimal());
        } else if (itUpper.contains("INT")) {
            vo.setValueTypes(it, java.lang.Integer.class);
            Number newVal = ((DecimalType) item.getState());
            logger.debug("JDBC::storeItemValueProvider: newVal.intValue: '{}'", newVal.intValue());
            vo.setValue(newVal.intValue());
        } else {// fall back to String
            vo.setValueTypes(it, java.lang.String.class);
            logger.warn("JDBC::storeItemValueProvider: item.getState().toString(): '{}'",
                    item.getState().toString());
            vo.setValue(item.getState().toString());
        }
    } else if ("ROLLERSHUTTERITEM".equals(itemType) || "DIMMERITEM".equals(itemType)) {
        vo.setValueTypes(getSqlTypes().get(itemType), java.lang.Integer.class);
        Number newVal = ((DecimalType) item.getState());
        logger.debug("JDBC::storeItemValueProvider: newVal.intValue: '{}'", newVal.intValue());
        vo.setValue(newVal.intValue());
    } else if ("DATETIMEITEM".equals(itemType)) {
        vo.setValueTypes(getSqlTypes().get(itemType), java.sql.Timestamp.class);
        java.sql.Timestamp d = new java.sql.Timestamp(
                ((DateTimeType) item.getState()).getZonedDateTime().toInstant().toEpochMilli());
        logger.debug("JDBC::storeItemValueProvider: DateTimeItem: '{}'", d);
        vo.setValue(d);
    } else {
        /*
         * !!ATTENTION!!
         *
         * 1. DimmerItem.getStateAs(PercentType.class).toString() always
         * returns 0
         * RollershutterItem.getStateAs(PercentType.class).toString() works
         * as expected
         *
         * 2. (item instanceof ColorItem) == (item instanceof DimmerItem) =
         * true Therefore for instance tests ColorItem always has to be
         * tested before DimmerItem
         *
         * !!ATTENTION!!
         */
        // All other items should return the best format by default
        vo.setValueTypes(getSqlTypes().get(itemType), java.lang.String.class);
        logger.debug("JDBC::storeItemValueProvider: other: item.getState().toString(): '{}'",
                item.getState().toString());
        vo.setValue(item.getState().toString());
    }
    return vo;
}
/*****************
* H E L P E R S *
*****************/
/**
 * Converts a raw JDBC column value into the State matching the item's type.
 *
 * Improvements: the redundant {@code ((String) v).toString()} casts are reduced to
 * plain casts, and the repeated {@code it.toUpperCase()} in the NumberItem branch is
 * hoisted into a single local. Conversion logic is unchanged; note that ColorItem
 * must stay checked before DimmerItem (see storeItemValueProvider's ATTENTION note).
 *
 * @param item the target item, used to select the conversion
 * @param v the raw value read from the database
 * @return the converted State
 */
protected State getState(Item item, Object v) {
    String clazz = v.getClass().getSimpleName();
    logger.debug("JDBC::ItemResultHandler::handleResult getState value = '{}', getClass = '{}', clazz = '{}'",
            v.toString(), v.getClass(), clazz);
    if (item instanceof NumberItem) {
        String it = getSqlTypes().get("NUMBERITEM");
        String itUpper = it.toUpperCase(); // hoisted: previously recomputed for every check
        if (itUpper.contains("DOUBLE")) {
            return new DecimalType(((Number) v).doubleValue());
        } else if (itUpper.contains("DECIMAL") || itUpper.contains("NUMERIC")) {
            return new DecimalType((BigDecimal) v);
        } else if (itUpper.contains("INT")) {
            return new DecimalType(((Integer) v).intValue());
        }
        return DecimalType.valueOf((String) v);
    } else if (item instanceof ColorItem) {
        return HSBType.valueOf((String) v);
    } else if (item instanceof DimmerItem) {
        return new PercentType(objectAsInteger(v));
    } else if (item instanceof SwitchItem) {
        return OnOffType.valueOf(((String) v).trim());
    } else if (item instanceof ContactItem) {
        return OpenClosedType.valueOf(((String) v).trim());
    } else if (item instanceof RollershutterItem) {
        return new PercentType(objectAsInteger(v));
    } else if (item instanceof DateTimeItem) {
        return new DateTimeType(
                ZonedDateTime.ofInstant(Instant.ofEpochMilli(objectAsLong(v)), ZoneId.systemDefault()));
    } else if (item instanceof StringItem) {
        return StringType.valueOf((String) v);
    } else {// Call, Location, String
        return StringType.valueOf((String) v);
    }
}
/**
 * Converts a raw JDBC time value (java.sql.Timestamp, or its String form) into a
 * ZonedDateTime in the system default zone.
 *
 * @param v the raw column value
 * @return the value as a ZonedDateTime
 */
protected ZonedDateTime objectAsDate(Object v) {
    Timestamp timestamp = (v instanceof java.lang.String) ? Timestamp.valueOf(v.toString()) : (Timestamp) v;
    return ZonedDateTime.ofInstant(timestamp.toInstant(), ZoneId.systemDefault());
}
/**
 * Converts a raw JDBC time value into epoch milliseconds.
 * Accepts Long (returned as-is), java.sql.Date, or java.sql.Timestamp.
 *
 * @param v the raw column value
 * @return epoch milliseconds
 */
protected Long objectAsLong(Object v) {
    if (v instanceof java.sql.Date) {
        return ((java.sql.Date) v).getTime();
    }
    if (v instanceof Long) {
        return ((Long) v).longValue();
    }
    return ((java.sql.Timestamp) v).getTime();
}
/**
 * Converts a raw JDBC numeric value into an Integer.
 *
 * Generalized from the previous Byte/Integer-only handling: any {@link Number}
 * (Byte, Short, Integer, Long, ...) is accepted via {@code intValue()}, which is
 * backward compatible with the old behavior for Byte and Integer. Non-numeric
 * values still fail with a ClassCastException, exactly as before.
 *
 * @param v the raw column value
 * @return the value as an Integer
 */
protected Integer objectAsInteger(Object v) {
    if (v instanceof Number) {
        return ((Number) v).intValue();
    }
    return ((Integer) v).intValue();
}
/**
 * Determines the item-type key (e.g. "NUMBERITEM") used for sqlTypes lookups.
 * For a GroupItem without a configured base item, falls back to the type of its
 * first member; if the group has no members, or no sqlType mapping exists, the
 * STRINGITEM fallback is returned.
 *
 * Fix: the previous code called {@code getMembers().iterator().next()}, which throws
 * NoSuchElementException for a GroupItem with no members — the subsequent null check
 * was unreachable. {@code stream().findFirst().orElse(null)} restores the intended
 * fallback behavior.
 *
 * @param i the item (or GroupItem) to classify
 * @return the item-type key, or "STRINGITEM" as fallback
 */
public String getItemType(Item i) {
    Item item = i;
    String def = "STRINGITEM";
    if (i instanceof GroupItem) {
        item = ((GroupItem) i).getBaseItem();
        if (item == null) {
            // GroupItem:<ItemType> is not defined in *.items, try the first member instead
            logger.debug(
                    "JDBC::getItemType: Cannot detect ItemType for {} because the GroupItems' base type isn't set in *.items File.",
                    i.getName());
            item = ((GroupItem) i).getMembers().stream().findFirst().orElse(null);
            if (item == null) {
                logger.debug(
                        "JDBC::getItemType: No ItemType found for first Child-Member of GroupItem {}, use ItemType for STRINGITEM as Fallback",
                        i.getName());
                return def;
            }
        }
    }
    String itemType = item.getClass().getSimpleName().toUpperCase();
    logger.debug("JDBC::getItemType: Try to use ItemType {} for Item {}", itemType, i.getName());
    if (sqlTypes.get(itemType) == null) {
        logger.warn(
                "JDBC::getItemType: No sqlType found for ItemType {}, use ItemType for STRINGITEM as Fallback for {}",
                itemType, i.getName());
        return def;
    }
    return itemType;
}
/******************************
* public Getters and Setters *
******************************/
/**
 * Returns the live mapping from item-type keys (e.g. "NUMBERITEM") and internal keys
 * (e.g. "tablePrimaryKey", "tablePrimaryValue") to database-specific SQL type strings.
 * Subclasses mutate this map to override defaults, so callers see the map itself,
 * not a copy.
 */
public Map<String, String> getSqlTypes() {
    return sqlTypes;
}
/**
 * Resolves the SQL column type configured for the given item.
 *
 * @param item the item to look up
 * @return the SQL type string from sqlTypes for the item's type key
 */
public String getDataType(Item item) {
    String typeKey = getItemType(item);
    return sqlTypes.get(typeKey);
}
}

View File

@@ -0,0 +1,243 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.model.JdbcHistoricItem;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcDerbyDAO extends JdbcBaseDAO {
private final Logger logger = LoggerFactory.getLogger(JdbcDerbyDAO.class);
/********
* INIT *
********/
public JdbcDerbyDAO() {
super();
initSqlTypes();
initDbProps();
initSqlQueries();
}
private void initSqlQueries() {
logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
sqlPingDB = "values 1";
sqlGetDB = "VALUES SYSCS_UTIL.SYSCS_GET_DATABASE_PROPERTY( 'DataDictionaryVersion' )"; // returns version
sqlIfTableExists = "SELECT * FROM SYS.SYSTABLES WHERE TABLENAME='#searchTable#'";
sqlCreateItemsTableIfNot = "CREATE TABLE #itemsManageTable# ( ItemId INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), #colname# #coltype# NOT NULL)";
sqlCreateItemTable = "CREATE TABLE #tableName# (time #tablePrimaryKey# NOT NULL, value #dbType#, PRIMARY KEY(time))";
// Prevent error against duplicate time value (seldom): No powerful Merge found:
// http://www.codeproject.com/Questions/162627/how-to-insert-new-record-in-my-table-if-not-exists
sqlInsertItemValue = "INSERT INTO #tableName# (TIME, VALUE) VALUES( #tablePrimaryValue#, CAST( ? as #dbType#) )";
}
private void initSqlTypes() {
sqlTypes.put("DATETIMEITEM", "TIMESTAMP");
sqlTypes.put("DIMMERITEM", "SMALLINT");
sqlTypes.put("ROLLERSHUTTERITEM", "SMALLINT");
sqlTypes.put("STRINGITEM", "VARCHAR(32000)");
sqlTypes.put("tablePrimaryValue", "CURRENT_TIMESTAMP");
logger.debug("JDBC::initSqlTypes: Initialized the type array sqlTypes={}", sqlTypes.values());
}
/**
* INFO: https://github.com/brettwooldridge/HikariCP
*/
private void initDbProps() {
// Properties for HikariCP
// Use driverClassName
databaseProps.setProperty("driverClassName", "org.apache.derby.jdbc.EmbeddedDriver");
// OR dataSourceClassName
// databaseProps.setProperty("dataSourceClassName", "org.apache.derby.jdbc.EmbeddedDataSource");
databaseProps.setProperty("maximumPoolSize", "1");
databaseProps.setProperty("minimumIdle", "1");
}
@Override
public void initAfterFirstDbConnection() {
logger.debug("JDBC::initAfterFirstDbConnection: Initializing step, after db is connected.");
// Initialize sqlTypes, depending on DB version for example
// derby does not like this... dbMeta = new DbMetaData();// get DB information
}
/**************
* ITEMS DAOs *
**************/
@Override
public Integer doPingDB() {
return Yank.queryScalar(sqlPingDB, Integer.class, null);
}
@Override
public boolean doIfTableExists(ItemsVO vo) {
String sql = StringUtilsExt.replaceArrayMerge(sqlIfTableExists, new String[] { "#searchTable#" },
new String[] { vo.getItemsManageTable().toUpperCase() });
logger.debug("JDBC::doIfTableExists sql={}", sql);
return Yank.queryScalar(sql, String.class, null) != null;
}
@Override
public Long doCreateNewEntryInItemsTable(ItemsVO vo) {
String sql = StringUtilsExt.replaceArrayMerge(sqlCreateNewEntryInItemsTable,
new String[] { "#itemsManageTable#", "#itemname#" },
new String[] { vo.getItemsManageTable().toUpperCase(), vo.getItemname() });
logger.debug("JDBC::doCreateNewEntryInItemsTable sql={}", sql);
return Yank.insert(sql, null);
}
@Override
public ItemsVO doCreateItemsTableIfNot(ItemsVO vo) {
// boolean tableExists = Yank.queryScalar(SQL_IF_TABLE_EXISTS.replace("#searchTable#",
// vo.getItemsManageTable().toUpperCase()), String.class, null) == null;
boolean tableExists = doIfTableExists(vo);
if (!tableExists) {
String sql = StringUtilsExt.replaceArrayMerge(sqlCreateItemsTableIfNot,
new String[] { "#itemsManageTable#", "#colname#", "#coltype#" },
new String[] { vo.getItemsManageTable().toUpperCase(), vo.getColname(), vo.getColtype() });
logger.debug("JDBC::doCreateItemsTableIfNot tableExists={} therefore sql={}", tableExists, sql);
Yank.execute(sql, null);
} else {
logger.debug("JDBC::doCreateItemsTableIfNot tableExists={}, did not CREATE TABLE", tableExists);
}
return vo;
}
/*************
* ITEM DAOs *
*************/
@Override
public void doCreateItemTable(ItemVO vo) {
String sql = StringUtilsExt.replaceArrayMerge(sqlCreateItemTable,
new String[] { "#tableName#", "#dbType#", "#tablePrimaryKey#" },
new String[] { vo.getTableName(), vo.getDbType(), sqlTypes.get("tablePrimaryKey") });
Yank.execute(sql, null);
}
@Override
public void doStoreItemValue(Item item, ItemVO vo) {
vo = storeItemValueProvider(item, vo);
String sql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue,
new String[] { "#tableName#", "#dbType#", "#tablePrimaryValue#" },
new String[] { vo.getTableName().toUpperCase(), vo.getDbType(), sqlTypes.get("tablePrimaryValue") });
Object[] params = new Object[] { vo.getValue() };
logger.debug("JDBC::doStoreItemValue sql={} value='{}'", sql, vo.getValue());
Yank.execute(sql, params);
}
@Override
public List<HistoricItem> doGetHistItemFilterQuery(Item item, FilterCriteria filter, int numberDecimalcount,
String table, String name) {
String sql = histItemFilterQueryProvider(filter, numberDecimalcount, table, name);
List<Object[]> m = Yank.queryObjectArrays(sql, null);
logger.debug("JDBC::doGetHistItemFilterQuery got Array length={}", m.size());
List<HistoricItem> items = new ArrayList<>();
for (int i = 0; i < m.size(); i++) {
logger.debug("JDBC::doGetHistItemFilterQuery 0='{}' 1='{}'", m.get(i)[0], m.get(i)[1]);
items.add(new JdbcHistoricItem(item.getName(), getState(item, m.get(i)[1]), objectAsDate(m.get(i)[0])));
}
return items;
}
/****************************
* SQL generation Providers *
****************************/
static final DateTimeFormatter JDBC_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
/**
* @param filter
* @param numberDecimalcount
* @param table
* @return
*/
private String histItemFilterQueryProvider(FilterCriteria filter, int numberDecimalcount, String table,
String simpleName) {
logger.debug(
"JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}",
StringUtilsExt.filterToString(filter), numberDecimalcount, table, simpleName);
String filterString = "";
if (filter.getBeginDate() != null) {
filterString += filterString.isEmpty() ? " WHERE" : " AND";
filterString += " TIME>'" + JDBC_DATE_FORMAT.format(filter.getBeginDate()) + "'";
}
if (filter.getEndDate() != null) {
filterString += filterString.isEmpty() ? " WHERE" : " AND";
filterString += " TIME<'" + JDBC_DATE_FORMAT.format(filter.getEndDate()) + "'";
}
filterString += (filter.getOrdering() == Ordering.ASCENDING) ? " ORDER BY time ASC" : " ORDER BY time DESC";
if (filter.getPageSize() != 0x7fffffff) {
// TODO: TESTING!!!
// filterString += " LIMIT " + filter.getPageNumber() *
// filter.getPageSize() + "," + filter.getPageSize();
// SELECT time, value FROM ohscriptfiles_sw_ace_paths_0001 ORDER BY
// time DESC OFFSET 1 ROWS FETCH NEXT 0 ROWS ONLY
// filterString += " OFFSET " + filter.getPageSize() +" ROWS FETCH
// FIRST||NEXT " + filter.getPageNumber() * filter.getPageSize() + "
// ROWS ONLY";
filterString += " OFFSET " + filter.getPageSize() + " ROWS FETCH FIRST "
+ (filter.getPageNumber() * filter.getPageSize() + 1) + " ROWS ONLY";
}
// http://www.seemoredata.com/en/showthread.php?132-Round-function-in-Apache-Derby
// simulated round function in Derby: CAST(value 0.0005 AS DECIMAL(15,3))
// simulated round function in Derby: "CAST(value 0.0005 AS DECIMAL(15,"+numberDecimalcount+"))"
String queryString = "SELECT time,";
if ("NUMBERITEM".equalsIgnoreCase(simpleName) && numberDecimalcount > -1) {
// rounding HALF UP
queryString += "CAST(value 0.";
for (int i = 0; i < numberDecimalcount; i++) {
queryString += "0";
}
queryString += "5 AS DECIMAL(31," + numberDecimalcount + "))"; // 31 is DECIMAL max precision
// https://db.apache.org/derby/docs/10.0/manuals/develop/develop151.html
} else {
queryString += " value FROM " + table.toUpperCase();
}
if (!filterString.isEmpty()) {
queryString += filterString;
}
logger.debug("JDBC::query queryString = {}", queryString);
return queryString;
}
/*****************
* H E L P E R S *
*****************/
/******************************
* public Getters and Setters *
******************************/
}

View File

@@ -0,0 +1,95 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcH2DAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcH2DAO.class);

    /********
     * INIT *
     ********/
    public JdbcH2DAO() {
        super();
        initSqlQueries();
        initSqlTypes();
        initDbProps();
    }

    // Only the table-existence check and the upsert statement differ from JdbcBaseDAO.
    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
        sqlIfTableExists = "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME='#searchTable#'";
        // SQL_INSERT_ITEM_VALUE = "INSERT INTO #tableName# (TIME, VALUE) VALUES( NOW(), CAST( ? as #dbType#) )";
        // http://stackoverflow.com/questions/19768051/h2-sql-database-insert-if-the-record-does-not-exist
        sqlInsertItemValue = "MERGE INTO #tableName# (TIME, VALUE) VALUES( #tablePrimaryValue#, CAST( ? as #dbType#) )";
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    private void initSqlTypes() {
        // H2 uses the default type mapping from JdbcBaseDAO unchanged.
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Properties for HikariCP
        databaseProps.setProperty("driverClassName", "org.h2.Driver");
        // driverClassName OR BETTER USE dataSourceClassName
        // databaseProps.setProperty("dataSourceClassName", "org.h2.jdbcx.JdbcDataSource");
    }

    /**************
     * ITEMS DAOs *
     **************/
    /*************
     * ITEM DAOs *
     *************/
    /**
     * Stores the item's state via H2's MERGE so that a duplicate time value updates
     * the existing row instead of failing.
     */
    @Override
    public void doStoreItemValue(Item item, ItemVO vo) {
        ItemVO resolved = storeItemValueProvider(item, vo);
        String[] tokens = { "#tableName#", "#dbType#", "#tablePrimaryValue#" };
        String[] replacements = { resolved.getTableName(), resolved.getDbType(), sqlTypes.get("tablePrimaryValue") };
        String mergeSql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue, tokens, replacements);
        Object[] queryParams = { resolved.getValue() };
        logger.debug("JDBC::doStoreItemValue sql={} value='{}'", mergeSql, resolved.getValue());
        Yank.execute(mergeSql, queryParams);
    }

    /****************************
     * SQL generation Providers *
     ****************************/
    /*****************
     * H E L P E R S *
     *****************/
    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,125 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcHsqldbDAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcHsqldbDAO.class);

    /********
     * INIT *
     ********/
    public JdbcHsqldbDAO() {
        super();
        initSqlQueries();
        initSqlTypes();
        initDbProps();
    }

    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
        // http://hsqldb.org/doc/guide/builtinfunctions-chapt.html
        sqlPingDB = "SELECT 1 FROM INFORMATION_SCHEMA.SYSTEM_USERS";
        sqlGetDB = "SELECT DATABASE () FROM INFORMATION_SCHEMA.SYSTEM_USERS";
        sqlIfTableExists = "SELECT * FROM INFORMATION_SCHEMA.SYSTEM_TABLES WHERE TABLE_NAME='#searchTable#'";
        sqlCreateItemsTableIfNot = "CREATE TABLE IF NOT EXISTS #itemsManageTable# ( ItemId INT GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1) NOT NULL, #colname# #coltype# NOT NULL)";
        sqlCreateNewEntryInItemsTable = "INSERT INTO #itemsManageTable# (ItemName) VALUES ('#itemname#')";
        // Prevent error against duplicate time value
        // http://hsqldb.org/doc/guide/dataaccess-chapt.html#dac_merge_statement
        // SQL_INSERT_ITEM_VALUE = "INSERT INTO #tableName# (TIME, VALUE) VALUES( NOW(), CAST( ? as #dbType#) )";
        sqlInsertItemValue = "MERGE INTO #tableName# "
                + "USING (VALUES #tablePrimaryValue#, CAST( ? as #dbType#)) temp (TIME, VALUE) ON (#tableName#.TIME=temp.TIME) "
                + "WHEN NOT MATCHED THEN INSERT (TIME, VALUE) VALUES (temp.TIME, temp.VALUE)";
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    private void initSqlTypes() {
        // HSQLDB uses the default type mapping from JdbcBaseDAO unchanged.
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Properties for HikariCP
        databaseProps.setProperty("driverClassName", "org.hsqldb.jdbcDriver");
    }

    /**************
     * ITEMS DAOs *
     **************/
    @Override
    public Integer doPingDB() {
        return Yank.queryScalar(sqlPingDB, Integer.class, null);
    }

    @Override
    public ItemsVO doCreateItemsTableIfNot(ItemsVO vo) {
        // NOTE: the placeholder array intentionally repeats "#itemsManageTable#" to match
        // the replacement behavior of StringUtilsExt.replaceArrayMerge — keep the arrays in sync.
        String[] tokens = { "#itemsManageTable#", "#colname#", "#coltype#", "#itemsManageTable#" };
        String[] replacements = { vo.getItemsManageTable(), vo.getColname(), vo.getColtype(),
                vo.getItemsManageTable() };
        String createSql = StringUtilsExt.replaceArrayMerge(sqlCreateItemsTableIfNot, tokens, replacements);
        logger.debug("JDBC::doCreateItemsTableIfNot sql={}", createSql);
        Yank.execute(createSql, null);
        return vo;
    }

    @Override
    public Long doCreateNewEntryInItemsTable(ItemsVO vo) {
        String[] tokens = { "#itemsManageTable#", "#itemname#" };
        String[] replacements = { vo.getItemsManageTable(), vo.getItemname() };
        String insertSql = StringUtilsExt.replaceArrayMerge(sqlCreateNewEntryInItemsTable, tokens, replacements);
        logger.debug("JDBC::doCreateNewEntryInItemsTable sql={}", insertSql);
        return Yank.insert(insertSql, null);
    }

    /*************
     * ITEM DAOs *
     *************/
    /**
     * Stores the item's state via HSQLDB's MERGE, inserting only when no row with the
     * same TIME exists yet.
     */
    @Override
    public void doStoreItemValue(Item item, ItemVO vo) {
        ItemVO resolved = storeItemValueProvider(item, vo);
        // "#tableName#" appears twice in the template (target table and join condition)
        String[] tokens = { "#tableName#", "#dbType#", "#tableName#", "#tablePrimaryValue#" };
        String[] replacements = { resolved.getTableName(), resolved.getDbType(), resolved.getTableName(),
                sqlTypes.get("tablePrimaryValue") };
        String mergeSql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue, tokens, replacements);
        Object[] queryParams = { resolved.getValue() };
        logger.debug("JDBC::doStoreItemValue sql={} value='{}'", mergeSql, resolved.getValue());
        Yank.execute(mergeSql, queryParams);
    }

    /****************************
     * SQL generation Providers *
     ****************************/
    /*****************
     * H E L P E R S *
     *****************/
    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,106 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import org.knowm.yank.Yank;
import org.openhab.persistence.jdbc.utils.DbMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcMariadbDAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcMariadbDAO.class);

    /********
     * INIT *
     ********/
    public JdbcMariadbDAO() {
        super();
        initSqlTypes();
        initDbProps();
        initSqlQueries();
    }

    // No MariaDB-specific SQL overrides; the statements from JdbcBaseDAO apply as-is.
    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    // No static type overrides; version-dependent overrides happen in initAfterFirstDbConnection().
    private void initSqlTypes() {
        logger.debug("JDBC::initSqlTypes: Initialize the type array");
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Performance tuning
        databaseProps.setProperty("dataSource.cachePrepStmts", "true");
        databaseProps.setProperty("dataSource.prepStmtCacheSize", "250");
        databaseProps.setProperty("dataSource.prepStmtCacheSqlLimit", "2048");
        databaseProps.setProperty("dataSource.jdbcCompliantTruncation", "false");// jdbc standard max varchar max length
        // of 21845
        // Properties for HikariCP
        // Use driverClassName
        databaseProps.setProperty("driverClassName", "org.mariadb.jdbc.Driver");
        // driverClassName OR BETTER USE dataSourceClassName
        // databaseProps.setProperty("dataSourceClassName", "org.mariadb.jdbc.MySQLDataSource");
        databaseProps.setProperty("maximumPoolSize", "3");
        databaseProps.setProperty("minimumIdle", "2");
    }

    @Override
    public void initAfterFirstDbConnection() {
        logger.debug("JDBC::initAfterFirstDbConnection: Initializing step, after db is connected.");
        dbMeta = new DbMetaData();
        // Initialize sqlTypes, depending on DB version for example
        // Millisecond-precision timestamps are enabled only on newer servers.
        // NOTE(review): confirm 5.1 is the right version threshold for TIMESTAMP(3)/NOW(3).
        if (dbMeta.isDbVersionGreater(5, 1)) {
            sqlTypes.put("DATETIMEITEM", "TIMESTAMP(3)");
            sqlTypes.put("tablePrimaryKey", "TIMESTAMP(3)");
            sqlTypes.put("tablePrimaryValue", "NOW(3)");
        }
    }

    /**************
     * ITEMS DAOs *
     **************/
    /**
     * Pings the database by running sqlPingDB and narrowing the Long result to an int.
     * NOTE(review): throws NullPointerException if the query yields no scalar —
     * confirm callers treat an exception (rather than null) as "not reachable".
     */
    @Override
    public Integer doPingDB() {
        return Yank.queryScalar(sqlPingDB, Long.class, null).intValue();
    }

    /*************
     * ITEM DAOs *
     *************/
    /****************************
     * SQL generation Providers *
     ****************************/
    /*****************
     * H E L P E R S *
     *****************/
    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,111 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import org.knowm.yank.Yank;
import org.openhab.persistence.jdbc.utils.DbMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* since driver version >= 6.0 sometimes a timezone conversion is needed: ?serverTimezone=UTC
* example: dbProps.setProperty("jdbcUrl", "jdbc:mysql://192.168.0.181:3306/ItemTypeTest3?serverTimezone=UTC");//mysql
* 5.7
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcMysqlDAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcMysqlDAO.class);

    /********
     * INIT *
     ********/
    public JdbcMysqlDAO() {
        super();
        initSqlTypes();
        initDbProps();
        initSqlQueries();
    }

    // No MySQL-specific SQL overrides; the statements from JdbcBaseDAO apply as-is.
    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    private void initSqlTypes() {
        logger.debug("JDBC::initSqlTypes: Initialize the type array");
        sqlTypes.put("STRINGITEM", "VARCHAR(21717)");// mysql using utf-8 max 65535/3 = 21845, using 21845-128 = 21717
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Performance tuning
        databaseProps.setProperty("dataSource.cachePrepStmts", "true");
        databaseProps.setProperty("dataSource.prepStmtCacheSize", "250");
        databaseProps.setProperty("dataSource.prepStmtCacheSqlLimit", "2048");
        databaseProps.setProperty("dataSource.jdbcCompliantTruncation", "false");// jdbc standard max varchar max length
        // of 21845
        // Properties for HikariCP
        // Use driverClassName
        databaseProps.setProperty("driverClassName", "com.mysql.jdbc.Driver");
        // OR dataSourceClassName
        // databaseProps.setProperty("dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
        databaseProps.setProperty("maximumPoolSize", "3");
        databaseProps.setProperty("minimumIdle", "2");
    }

    @Override
    public void initAfterFirstDbConnection() {
        logger.debug("JDBC::initAfterFirstDbConnection: Initializing step, after db is connected.");
        dbMeta = new DbMetaData();
        // Initialize sqlTypes, depending on DB version for example
        // Millisecond-precision timestamps are enabled only on newer servers.
        // NOTE(review): confirm 5.5 is the right version threshold for TIMESTAMP(3)/NOW(3).
        if (dbMeta.isDbVersionGreater(5, 5)) {
            sqlTypes.put("DATETIMEITEM", "TIMESTAMP(3)");
            sqlTypes.put("tablePrimaryKey", "TIMESTAMP(3)");
            sqlTypes.put("tablePrimaryValue", "NOW(3)");
        }
    }

    /**************
     * ITEMS DAOs *
     **************/
    /**
     * Pings the database by running sqlPingDB and narrowing the Long result to an int.
     * NOTE(review): throws NullPointerException if the query yields no scalar —
     * confirm callers treat an exception (rather than null) as "not reachable".
     */
    @Override
    public Integer doPingDB() {
        return Yank.queryScalar(sqlPingDB, Long.class, null).intValue();
    }

    /*************
     * ITEM DAOs *
     *************/
    /****************************
     * SQL generation Providers *
     ****************************/
    /*****************
     * H E L P E R S *
     *****************/
    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,209 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.model.JdbcHistoricItem;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcPostgresqlDAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcPostgresqlDAO.class);

    /********
     * INIT *
     ********/
    public JdbcPostgresqlDAO() {
        super();
        initSqlQueries();
        initSqlTypes();
        initDbProps();
    }

    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
        // System Information Functions: https://www.postgresql.org/docs/9.2/static/functions-info.html
        sqlGetDB = "SELECT CURRENT_DATABASE()";
        sqlIfTableExists = "SELECT * FROM PG_TABLES WHERE TABLENAME='#searchTable#'";
        sqlCreateItemsTableIfNot = "CREATE TABLE IF NOT EXISTS #itemsManageTable# (itemid SERIAL NOT NULL, #colname# #coltype# NOT NULL, CONSTRAINT #itemsManageTable#_pkey PRIMARY KEY (itemid))";
        // Fix: the items manage table name was previously hard-coded as "items" in two of the
        // three table references, which ignored a non-default configured manage table name.
        sqlCreateNewEntryInItemsTable = "INSERT INTO #itemsManageTable# (itemname) SELECT itemname FROM #itemsManageTable# UNION VALUES ('#itemname#') EXCEPT SELECT itemname FROM #itemsManageTable#";
        sqlGetItemTables = "SELECT table_name FROM information_schema.tables WHERE table_type='BASE TABLE' AND table_schema='public' AND NOT table_name='#itemsManageTable#'";
        // http://stackoverflow.com/questions/17267417/how-do-i-do-an-upsert-merge-insert-on-duplicate-update-in-postgresql
        // for later use, PostgreSql > 9.5 to prevent PRIMARY key violation use:
        // SQL_INSERT_ITEM_VALUE = "INSERT INTO #tableName# (TIME, VALUE) VALUES( NOW(), CAST( ? as #dbType#) ) ON
        // CONFLICT DO NOTHING";
        sqlInsertItemValue = "INSERT INTO #tableName# (TIME, VALUE) VALUES( #tablePrimaryValue#, CAST( ? as #dbType#) )";
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    private void initSqlTypes() {
        // Initialize the type array
        sqlTypes.put("CALLITEM", "VARCHAR");
        sqlTypes.put("COLORITEM", "VARCHAR");
        sqlTypes.put("CONTACTITEM", "VARCHAR");
        sqlTypes.put("DATETIMEITEM", "TIMESTAMP");
        sqlTypes.put("DIMMERITEM", "SMALLINT");
        sqlTypes.put("LOCATIONITEM", "VARCHAR");
        sqlTypes.put("NUMBERITEM", "DOUBLE PRECISION");
        sqlTypes.put("ROLLERSHUTTERITEM", "SMALLINT");
        sqlTypes.put("STRINGITEM", "VARCHAR");
        sqlTypes.put("SWITCHITEM", "VARCHAR");
        logger.debug("JDBC::initSqlTypes: Initialized the type array sqlTypes={}", sqlTypes.values());
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Performance:
        // databaseProps.setProperty("dataSource.cachePrepStmts", "true");
        // databaseProps.setProperty("dataSource.prepStmtCacheSize", "250");
        // databaseProps.setProperty("dataSource.prepStmtCacheSqlLimit", "2048");

        // Properties for HikariCP
        databaseProps.setProperty("driverClassName", "org.postgresql.Driver");
        // driverClassName OR BETTER USE dataSourceClassName
        // databaseProps.setProperty("dataSourceClassName", "org.postgresql.ds.PGSimpleDataSource");
        // databaseProps.setProperty("maximumPoolSize", "3");
        // databaseProps.setProperty("minimumIdle", "2");
    }

    /**************
     * ITEMS DAOs *
     **************/
    @Override
    public ItemsVO doCreateItemsTableIfNot(ItemsVO vo) {
        // #itemsManageTable# occurs twice in the statement; replaceArrayMerge appears to
        // replace one occurrence per array entry, so the table name is passed twice.
        String sql = StringUtilsExt.replaceArrayMerge(sqlCreateItemsTableIfNot,
                new String[] { "#itemsManageTable#", "#colname#", "#coltype#", "#itemsManageTable#" },
                new String[] { vo.getItemsManageTable(), vo.getColname(), vo.getColtype(), vo.getItemsManageTable() });
        logger.debug("JDBC::doCreateItemsTableIfNot sql={}", sql);
        Yank.execute(sql, null);
        return vo;
    }

    @Override
    public Long doCreateNewEntryInItemsTable(ItemsVO vo) {
        // #itemsManageTable# now occurs three times in the fixed statement (see
        // initSqlQueries), so the configured manage table name is passed three times.
        String sql = StringUtilsExt.replaceArrayMerge(sqlCreateNewEntryInItemsTable,
                new String[] { "#itemsManageTable#", "#itemsManageTable#", "#itemsManageTable#", "#itemname#" },
                new String[] { vo.getItemsManageTable(), vo.getItemsManageTable(), vo.getItemsManageTable(),
                        vo.getItemname() });
        logger.debug("JDBC::doCreateNewEntryInItemsTable sql={}", sql);
        return Yank.insert(sql, null);
    }

    @Override
    public List<ItemsVO> doGetItemTables(ItemsVO vo) {
        String sql = StringUtilsExt.replaceArrayMerge(sqlGetItemTables, new String[] { "#itemsManageTable#" },
                new String[] { vo.getItemsManageTable() });
        logger.debug("JDBC::doGetItemTables sql={}", sql);
        return Yank.queryBeanList(sql, ItemsVO.class, null);
    }

    /*************
     * ITEM DAOs *
     *************/
    @Override
    public void doStoreItemValue(Item item, ItemVO vo) {
        vo = storeItemValueProvider(item, vo);
        String sql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue,
                new String[] { "#tableName#", "#dbType#", "#tablePrimaryValue#" },
                new String[] { vo.getTableName(), vo.getDbType(), sqlTypes.get("tablePrimaryValue") });
        Object[] params = new Object[] { vo.getValue() };
        logger.debug("JDBC::doStoreItemValue sql={} value='{}'", sql, vo.getValue());
        Yank.execute(sql, params);
    }

    @Override
    public List<HistoricItem> doGetHistItemFilterQuery(Item item, FilterCriteria filter, int numberDecimalcount,
            String table, String name) {
        String sql = histItemFilterQueryProvider(filter, numberDecimalcount, table, name);
        logger.debug("JDBC::doGetHistItemFilterQuery sql={}", sql);
        List<Object[]> m = Yank.queryObjectArrays(sql, null);
        // Column 0 is the timestamp, column 1 the raw value; both are converted via base helpers.
        List<HistoricItem> items = new ArrayList<>();
        for (int i = 0; i < m.size(); i++) {
            items.add(new JdbcHistoricItem(item.getName(), getState(item, m.get(i)[1]), objectAsDate(m.get(i)[0])));
        }
        return items;
    }

    /****************************
     * SQL generation Providers *
     ****************************/
    static final DateTimeFormatter JDBC_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Builds the SELECT statement for a history query, applying time-range filtering,
     * ordering, paging (PostgreSQL OFFSET/LIMIT) and optional numeric rounding.
     *
     * @param filter query filter (time range, ordering, page size/number)
     * @param numberDecimalcount number of decimals for NUMBERITEM rounding; -1 disables rounding
     * @param table the item table to query
     * @param simpleName the item type's simple name, used to detect NUMBERITEM
     * @return the assembled SQL query string
     */
    private String histItemFilterQueryProvider(FilterCriteria filter, int numberDecimalcount, String table,
            String simpleName) {
        logger.debug(
                "JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}",
                filter.toString(), numberDecimalcount, table, simpleName);
        String filterString = "";
        if (filter.getBeginDate() != null) {
            filterString += filterString.isEmpty() ? " WHERE" : " AND";
            filterString += " TIME>'" + JDBC_DATE_FORMAT.format(filter.getBeginDate()) + "'";
        }
        if (filter.getEndDate() != null) {
            filterString += filterString.isEmpty() ? " WHERE" : " AND";
            filterString += " TIME<'" + JDBC_DATE_FORMAT.format(filter.getEndDate()) + "'";
        }
        filterString += (filter.getOrdering() == Ordering.ASCENDING) ? " ORDER BY time ASC" : " ORDER BY time DESC";
        // Integer.MAX_VALUE page size means "unlimited" - skip paging in that case
        if (filter.getPageSize() != 0x7fffffff) {
            // see:
            // http://www.jooq.org/doc/3.5/manual/sql-building/sql-statements/select-statement/limit-clause/
            filterString += " OFFSET " + filter.getPageNumber() * filter.getPageSize() + " LIMIT "
                    + filter.getPageSize();
        }
        String queryString = "NUMBERITEM".equalsIgnoreCase(simpleName) && numberDecimalcount > -1
                ? "SELECT time, ROUND(CAST (value AS numeric)," + numberDecimalcount + ") FROM " + table
                : "SELECT time, value FROM " + table;
        if (!filterString.isEmpty()) {
            queryString += filterString;
        }
        logger.debug("JDBC::query queryString = {}", queryString);
        return queryString;
    }

    /*****************
     * H E L P E R S *
     *****************/

    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,114 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.db;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extended Database Configuration class. Class represents
* the extended database-specific configuration. Overrides and supplements the
* default settings from JdbcBaseDAO. Enter only the differences to JdbcBaseDAO here.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcSqliteDAO extends JdbcBaseDAO {
    private final Logger logger = LoggerFactory.getLogger(JdbcSqliteDAO.class);

    /********
     * INIT *
     ********/
    public JdbcSqliteDAO() {
        super();
        initSqlQueries();
        initSqlTypes();
        initDbProps();
    }

    private void initSqlQueries() {
        logger.debug("JDBC::initSqlQueries: '{}'", this.getClass().getSimpleName());
        // "PRAGMA DATABASE_LIST" also yields the database path/name; alternatives would be
        // "SELECT SQLITE_VERSION()" or "PRAGMA SCHEMA_VERSION".
        sqlGetDB = "PRAGMA DATABASE_LIST";
        sqlIfTableExists = "SELECT name FROM sqlite_master WHERE type='table' AND name='#searchTable#'";
        sqlCreateItemsTableIfNot = "CREATE TABLE IF NOT EXISTS #itemsManageTable# (ItemId INTEGER PRIMARY KEY AUTOINCREMENT, #colname# #coltype# NOT NULL)";
        sqlInsertItemValue = "INSERT OR IGNORE INTO #tableName# (TIME, VALUE) VALUES( #tablePrimaryValue#, CAST( ? as #dbType#) )";
    }

    /**
     * INFO: http://www.java2s.com/Code/Java/Database-SQL-JDBC/StandardSQLDataTypeswithTheirJavaEquivalents.htm
     */
    private void initSqlTypes() {
        logger.debug("JDBC::initSqlTypes: Initialize the type array");
        // Millisecond-precision local timestamp as the primary-key value expression
        sqlTypes.put("tablePrimaryValue", "strftime('%Y-%m-%d %H:%M:%f' , 'now' , 'localtime')");
    }

    /**
     * INFO: https://github.com/brettwooldridge/HikariCP
     */
    private void initDbProps() {
        // Properties for HikariCP
        databaseProps.setProperty("driverClassName", "org.sqlite.JDBC");
        // driverClassName OR BETTER USE dataSourceClassName
        // databaseProps.setProperty("dataSourceClassName", "org.sqlite.SQLiteDataSource");
    }

    /**************
     * ITEMS DAOs *
     **************/
    @Override
    public String doGetDB() {
        // First row of the "file" column of PRAGMA DATABASE_LIST holds the database file path.
        return Yank.queryColumn(sqlGetDB, "file", String.class, null).get(0);
    }

    @Override
    public ItemsVO doCreateItemsTableIfNot(ItemsVO vo) {
        String[] placeholders = { "#itemsManageTable#", "#colname#", "#coltype#" };
        String[] replacements = { vo.getItemsManageTable(), vo.getColname(), vo.getColtype() };
        String createSql = StringUtilsExt.replaceArrayMerge(sqlCreateItemsTableIfNot, placeholders, replacements);
        logger.debug("JDBC::doCreateItemsTableIfNot sql={}", createSql);
        Yank.execute(createSql, null);
        return vo;
    }

    /*************
     * ITEM DAOs *
     *************/
    @Override
    public void doStoreItemValue(Item item, ItemVO vo) {
        vo = storeItemValueProvider(item, vo);
        Object[] queryParams = { vo.getValue() };
        String[] placeholders = { "#tableName#", "#dbType#", "#tablePrimaryValue#" };
        String[] replacements = { vo.getTableName(), vo.getDbType(), sqlTypes.get("tablePrimaryValue") };
        String insertSql = StringUtilsExt.replaceArrayMerge(sqlInsertItemValue, placeholders, replacements);
        logger.debug("JDBC::doStoreItemValue sql={} value='{}'", insertSql, vo.getValue());
        Yank.execute(insertSql, queryParams);
    }

    /****************************
     * SQL generation Providers *
     ****************************/

    /*****************
     * H E L P E R S *
     *****************/

    /******************************
     * public Getters and Setters *
     ******************************/
}

View File

@@ -0,0 +1,383 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.internal;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.persistence.jdbc.db.JdbcBaseDAO;
import org.openhab.persistence.jdbc.utils.MovingAverage;
import org.openhab.persistence.jdbc.utils.StringUtilsExt;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Configuration class
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcConfiguration {
    private final Logger logger = LoggerFactory.getLogger(JdbcConfiguration.class);

    // Splits a config key like "sqltype.NUMBER" into prefix "sqltype" and suffix "NUMBER"
    private static final Pattern EXTRACT_CONFIG_PATTERN = Pattern.compile("^(.*?)\\.([0-9.a-zA-Z]+)$");
    // Package prefix of the database-specific DAO classes; completed with the
    // capitalized service name plus "DAO" (e.g. "...db.JdbcMysqlDAO")
    private static final String DB_DAO_PACKAGE = "org.openhab.persistence.jdbc.db.Jdbc";

    private Map<Object, Object> configuration;

    private JdbcBaseDAO dBDAO = null;
    private String dbName = null;
    boolean dbConnected = false;
    boolean driverAvailable = false;

    private String serviceName;
    private String name = "jdbc";
    // Result of parsing the configuration in the constructor; false disables the service
    public final boolean valid;

    // private String url;
    // private String user;
    // private String password;
    private int numberDecimalcount = 3;
    private boolean tableUseRealItemNames = false;
    private String tableNamePrefix = "item";
    private int tableIdDigitCount = 4;
    private boolean rebuildTableNames = false;

    private int errReconnectThreshold = 0;

    // Timing statistics, only maintained when enableLogTime is set
    public int timerCount = 0;
    public int time1000Statements = 0;
    public long timer1000 = 0;
    public MovingAverage timeAverage50arr = new MovingAverage(50);
    public MovingAverage timeAverage100arr = new MovingAverage(100);
    public MovingAverage timeAverage200arr = new MovingAverage(200);
    public boolean enableLogTime = false;

    public JdbcConfiguration(Map<Object, Object> configuration) {
        logger.debug("JDBC::JdbcConfiguration");
        valid = updateConfig(configuration);
    }

    /**
     * Parses the raw service configuration, selects and instantiates the database-specific
     * DAO and fills the HikariCP connection properties.
     *
     * @param config raw configuration map; values may be null
     * @return true if the configuration is usable, false on a missing/unparsable url
     */
    private boolean updateConfig(Map<Object, @Nullable Object> config) {
        configuration = config;
        logger.debug("JDBC::updateConfig: configuration size = {}", configuration.size());

        String user = (String) configuration.get("user");
        String password = (String) configuration.get("password");

        // mandatory url
        String url = (String) configuration.get("url");
        if (url == null) {
            logger.error("Mandatory url parameter is missing in configuration!");
            return false;
        }
        Properties parsedURL = StringUtilsExt.parseJdbcURL(url);

        if (user == null || user.isBlank()) {
            logger.debug("No jdbc:user parameter defined in jdbc.cfg");
        }
        if (password == null || password.isBlank()) {
            logger.debug("No jdbc:password parameter defined in jdbc.cfg.");
        }
        if (url.isBlank()) {
            logger.debug(
                    "JDBC url is missing - please configure in jdbc.cfg like 'jdbc:<service>:<host>[:<port>;<attributes>]'");
            return false;
        }

        // parseJdbcURL flags malformed urls via the "parseValid" property
        if ("false".equalsIgnoreCase(parsedURL.getProperty("parseValid"))) {
            Enumeration<?> en = parsedURL.propertyNames();
            String enstr = "";
            for (Object key : Collections.list(en)) {
                enstr += key + " = " + parsedURL.getProperty("" + key) + "\n";
            }
            logger.warn(
                    "JDBC url is not well formatted: {}\nPlease configure in openhab.cfg like 'jdbc:<service>:<host>[:<port>;<attributes>]'",
                    enstr);
            return false;
        }

        logger.debug("JDBC::updateConfig: user={}", user);
        logger.debug("JDBC::updateConfig: password exists? {}", password != null && !password.isBlank());
        logger.debug("JDBC::updateConfig: url={}", url);

        // set database type and database type class
        setDBDAOClass(parsedURL.getProperty("dbShortcut")); // derby, h2, hsqldb, mariadb, mysql, postgresql,
        // sqlite

        // set user
        if (user != null && !user.isBlank()) {
            dBDAO.databaseProps.setProperty("dataSource.user", user);
        }
        // set password
        if (password != null && !password.isBlank()) {
            dBDAO.databaseProps.setProperty("dataSource.password", password);
        }

        // set sql-types from external config
        setSqlTypes();

        final Pattern isNumericPattern = Pattern.compile("\\d+(\\.\\d+)?");

        String et = (String) configuration.get("reconnectCnt");
        if (et != null && !et.isBlank() && isNumericPattern.matcher(et).matches()) {
            errReconnectThreshold = Integer.parseInt(et);
            logger.debug("JDBC::updateConfig: errReconnectThreshold={}", errReconnectThreshold);
        }

        String np = (String) configuration.get("tableNamePrefix");
        if (np != null && !np.isBlank()) {
            tableNamePrefix = np;
            logger.debug("JDBC::updateConfig: tableNamePrefix={}", tableNamePrefix);
        }

        String dd = (String) configuration.get("numberDecimalcount");
        if (dd != null && !dd.isBlank() && isNumericPattern.matcher(dd).matches()) {
            numberDecimalcount = Integer.parseInt(dd);
            logger.debug("JDBC::updateConfig: numberDecimalcount={}", numberDecimalcount);
        }

        String rn = (String) configuration.get("tableUseRealItemNames");
        if (rn != null && !rn.isBlank()) {
            // Simplified: the former `"true".equals(rn) ? Boolean.parseBoolean(rn) : false`
            // is exactly equivalent to this case-sensitive equality check.
            tableUseRealItemNames = "true".equals(rn);
            logger.debug("JDBC::updateConfig: tableUseRealItemNames={}", tableUseRealItemNames);
        }

        String td = (String) configuration.get("tableIdDigitCount");
        if (td != null && !td.isBlank() && isNumericPattern.matcher(td).matches()) {
            tableIdDigitCount = Integer.parseInt(td);
            logger.debug("JDBC::updateConfig: tableIdDigitCount={}", tableIdDigitCount);
        }

        String rt = (String) configuration.get("rebuildTableNames");
        if (rt != null && !rt.isBlank()) {
            rebuildTableNames = Boolean.parseBoolean(rt);
            logger.debug("JDBC::updateConfig: rebuildTableNames={}", rebuildTableNames);
        }

        // undocumented
        String ac = (String) configuration.get("maximumPoolSize");
        if (ac != null && !ac.isBlank()) {
            dBDAO.databaseProps.setProperty("maximumPoolSize", ac);
        }
        // undocumented
        String ic = (String) configuration.get("minimumIdle");
        if (ic != null && !ic.isBlank()) {
            dBDAO.databaseProps.setProperty("minimumIdle", ic);
        }
        // undocumented
        String it = (String) configuration.get("idleTimeout");
        if (it != null && !it.isBlank()) {
            dBDAO.databaseProps.setProperty("idleTimeout", it);
        }
        // undocumented
        String ent = (String) configuration.get("enableLogTime");
        if (ent != null && !ent.isBlank()) {
            // Simplified: equivalent to the former redundant ternary (see tableUseRealItemNames)
            enableLogTime = "true".equals(ent);
        }
        logger.debug("JDBC::updateConfig: enableLogTime {}", enableLogTime);

        // undocumented
        String fd = (String) configuration.get("driverClassName");
        if (fd != null && !fd.isBlank()) {
            dBDAO.databaseProps.setProperty("driverClassName", fd);
        }
        // undocumented
        String ds = (String) configuration.get("dataSourceClassName");
        if (ds != null && !ds.isBlank()) {
            dBDAO.databaseProps.setProperty("dataSourceClassName", ds);
        }

        // undocumented
        // Prefer driverClassName (which requires jdbcUrl to be set as well); fall back to
        // dataSourceClassName only when no driverClassName is configured.
        String dn = dBDAO.databaseProps.getProperty("driverClassName");
        if (dn == null) {
            dn = dBDAO.databaseProps.getProperty("dataSourceClassName");
        } else {
            dBDAO.databaseProps.setProperty("jdbcUrl", url);
        }

        // test if JDBC driver bundle is available
        testJDBCDriver(dn);

        logger.debug("JDBC::updateConfig: configuration complete. service={}", getName());

        return true;
    }

    /**
     * Resolves the database-specific DAO class from the url's service shortcut and
     * instantiates it; falls back to JdbcBaseDAO when no specific class exists.
     *
     * @param sn service shortcut from the jdbc url (e.g. "mysql"); may be null/blank
     */
    private void setDBDAOClass(String sn) {
        serviceName = "none";

        // set database type
        if (sn == null || sn.isBlank() || sn.length() < 2) {
            logger.error(
                    "JDBC::updateConfig: Required database url like 'jdbc:<service>:<host>[:<port>;<attributes>]' - please configure the jdbc:url parameter in openhab.cfg");
        } else {
            serviceName = sn;
        }
        logger.debug("JDBC::updateConfig: found serviceName = '{}'", serviceName);

        // set class for database type
        String ddp = DB_DAO_PACKAGE + serviceName.toUpperCase().charAt(0) + serviceName.toLowerCase().substring(1)
                + "DAO";

        logger.debug("JDBC::updateConfig: Init Data Access Object Class: '{}'", ddp);
        try {
            // NOTE(review): Class.newInstance() is deprecated since Java 9; kept unchanged
            // because getDeclaredConstructor().newInstance() would alter the exception
            // types handled by the catch blocks below.
            dBDAO = (JdbcBaseDAO) Class.forName(ddp).newInstance();
            logger.debug("JDBC::updateConfig: dBDAO ClassName={}", dBDAO.getClass().getName());
        } catch (InstantiationException e) {
            logger.error("JDBC::updateConfig: InstantiationException: {}", e.getMessage());
        } catch (IllegalAccessException e) {
            logger.error("JDBC::updateConfig: IllegalAccessException: {}", e.getMessage());
        } catch (ClassNotFoundException e) {
            logger.warn("JDBC::updateConfig: no Configuration for serviceName '{}' found. ClassNotFoundException: {}",
                    serviceName, e.getMessage());
            logger.debug("JDBC::updateConfig: using default Database Configuration: JdbcBaseDAO !!");
            dBDAO = new JdbcBaseDAO();
            logger.debug("JDBC::updateConfig: dBConfig done");
        }
    }

    /**
     * Copies user-defined sql type overrides ("sqltype.<ITEMTYPE>" or "sqltype.table*"
     * keys) from the configuration into the DAO's sqlTypes map.
     */
    private void setSqlTypes() {
        Set<Object> keys = configuration.keySet();
        for (Object k : keys) {
            String key = (String) k;
            Matcher matcher = EXTRACT_CONFIG_PATTERN.matcher(key);
            if (!matcher.matches()) {
                continue;
            }
            matcher.reset();
            matcher.find();
            if (!matcher.group(1).equals("sqltype")) {
                continue;
            }
            String itemType = matcher.group(2);
            // "table*" keys (e.g. tablePrimaryKey) are used verbatim; item types are
            // normalized to the upper-case "<TYPE>ITEM" convention of the sqlTypes map.
            if (!itemType.startsWith("table")) {
                itemType = itemType.toUpperCase() + "ITEM";
            }
            String value = (String) configuration.get(key);
            logger.debug("JDBC::updateConfig: set sqlTypes: itemType={} value={}", itemType, value);
            dBDAO.sqlTypes.put(itemType, value);
        }
    }

    /**
     * Checks that the configured JDBC driver (or data source) class can be loaded and
     * sets the driverAvailable flag accordingly, logging download hints on failure.
     *
     * @param driver fully-qualified driver or data source class name
     */
    private void testJDBCDriver(String driver) {
        driverAvailable = true;
        try {
            Class.forName(driver);
            logger.debug("JDBC::updateConfig: load JDBC-driverClass was successful: '{}'", driver);
        } catch (ClassNotFoundException e) {
            driverAvailable = false;
            logger.error(
                    "JDBC::updateConfig: could NOT load JDBC-driverClassName or JDBC-dataSourceClassName. ClassNotFoundException: '{}'",
                    e.getMessage());
            String warn = ""
                    + "\n\n\t!!!\n\tTo avoid this error, place an appropriate JDBC driver file for serviceName '{}' in addons directory.\n"
                    + "\tCopy missing JDBC-Driver-jar to your OpenHab/addons Folder.\n\t!!!\n" + "\tDOWNLOAD: \n";
            if (serviceName.equals("derby")) {
                warn += "\tDerby: version >= 10.11.1.1 from http://mvnrepository.com/artifact/org.apache.derby/derby\n";
            } else if (serviceName.equals("h2")) {
                warn += "\tH2: version >= 1.4.189 from http://mvnrepository.com/artifact/com.h2database/h2\n";
            } else if (serviceName.equals("hsqldb")) {
                warn += "\tHSQLDB: version >= 2.3.3 from http://mvnrepository.com/artifact/org.hsqldb/hsqldb\n";
            } else if (serviceName.equals("mariadb")) {
                warn += "\tMariaDB: version >= 1.2.0 from http://mvnrepository.com/artifact/org.mariadb.jdbc/mariadb-java-client\n";
            } else if (serviceName.equals("mysql")) {
                warn += "\tMySQL: version >= 5.1.36 from http://mvnrepository.com/artifact/mysql/mysql-connector-java\n";
            } else if (serviceName.equals("postgresql")) {
                warn += "\tPostgreSQL:version >= 9.4.1208 from http://mvnrepository.com/artifact/org.postgresql/postgresql\n";
            } else if (serviceName.equals("sqlite")) {
                warn += "\tSQLite: version >= 3.16.1 from http://mvnrepository.com/artifact/org.xerial/sqlite-jdbc\n";
            }
            logger.warn(warn, serviceName);
        }
    }

    public Properties getHikariConfiguration() {
        return dBDAO.databaseProps;
    }

    public String getName() {
        // return serviceName;
        return name;
    }

    public String getServiceName() {
        return serviceName;
    }

    public String getTableNamePrefix() {
        return tableNamePrefix;
    }

    public int getErrReconnectThreshold() {
        return errReconnectThreshold;
    }

    public boolean getRebuildTableNames() {
        return rebuildTableNames;
    }

    public int getNumberDecimalcount() {
        return numberDecimalcount;
    }

    public boolean getTableUseRealItemNames() {
        return tableUseRealItemNames;
    }

    public int getTableIdDigitCount() {
        return tableIdDigitCount;
    }

    public JdbcBaseDAO getDBDAO() {
        return dBDAO;
    }

    public String getDbName() {
        return dbName;
    }

    public void setDbName(String dbName) {
        this.dbName = dbName;
    }

    public boolean isDbConnected() {
        return dbConnected;
    }

    public void setDbConnected(boolean dbConnected) {
        logger.debug("JDBC::setDbConnected {}", dbConnected);
        // Initializing step, after db is connected.
        // Initialize sqlTypes, depending on DB version for example
        dBDAO.initAfterFirstDbConnection();
        // Running once again to prior external configured SqlTypes!
        setSqlTypes();
        this.dbConnected = dbConnected;
    }

    public boolean isDriverAvailable() {
        return driverAvailable;
    }
}

View File

@@ -0,0 +1,418 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.internal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.knowm.yank.Yank;
import org.openhab.core.items.Item;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.persistence.PersistenceItemInfo;
import org.openhab.persistence.jdbc.model.ItemVO;
import org.openhab.persistence.jdbc.model.ItemsVO;
import org.openhab.persistence.jdbc.model.JdbcPersistenceItemInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Mapper class
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcMapper {
private final Logger logger = LoggerFactory.getLogger(JdbcMapper.class);
// Error counter - used to reconnect to database on error
protected int errCnt;
// Set to false when no JDBC driver is available (see openConnection); gates checkDBAccessability
protected boolean initialized = false;
// Parsed service configuration, including the database-specific DAO
protected JdbcConfiguration conf = null;
// Maps item name -> database table name (filled in checkDBSchema/getTable)
protected final Map<String, String> sqlTables = new HashMap<>();
// Timing statistics bounds - presumably maintained by logTime(); not visible here - TODO confirm
private long afterAccessMin = 10000;
private long afterAccessMax = 0;
// Regex of characters NOT allowed in item-derived names - presumably used when
// sanitizing table names; usage not visible in this chunk - TODO confirm
private static final String ITEM_NAME_PATTERN = "[^a-zA-Z_0-9\\-]";
/*****************
 * MAPPER ITEMS *
 *****************/
/**
 * Checks database reachability. On the very first successful connection the database
 * name is fetched via the DAO and cached in the configuration; on later calls the
 * DAO's ping query is used instead.
 *
 * @return true if the database answered (name fetched or ping result > 0)
 */
public boolean pingDB() {
logger.debug("JDBC::pingDB");
boolean ret = false;
long timerStart = System.currentTimeMillis();
if (openConnection()) {
if (conf.getDbName() == null) {
// Absolutely first db action after the connection is established: ask for the db name
logger.debug(
"JDBC::pingDB asking db for name as absolutely first db action, after connection is established.");
String dbName = conf.getDBDAO().doGetDB();
conf.setDbName(dbName);
ret = dbName.length() > 0;
} else {
ret = conf.getDBDAO().doPingDB() > 0;
}
}
logTime("pingDB", timerStart, System.currentTimeMillis());
return ret;
}
/**
 * Returns the database name as reported by the DAO.
 *
 * @return the database name
 */
public String getDB() {
    logger.debug("JDBC::getDB");
    long timerStart = System.currentTimeMillis();
    String res = conf.getDBDAO().doGetDB();
    // Fix: timing was previously logged under the wrong label "pingDB" (copy-paste error)
    logTime("getDB", timerStart, System.currentTimeMillis());
    return res;
}
/**
 * Inserts a new row for the given item into the items manage table and stores the
 * generated id back into the value object.
 *
 * NOTE(review): if doCreateNewEntryInItemsTable can return null (no generated key),
 * i.intValue() throws a NullPointerException - confirm the DAO contract.
 *
 * @param vo the item row to create; its itemid is set from the generated key
 * @return the same vo, with itemid populated
 */
public ItemsVO createNewEntryInItemsTable(ItemsVO vo) {
logger.debug("JDBC::createNewEntryInItemsTable");
long timerStart = System.currentTimeMillis();
Long i = conf.getDBDAO().doCreateNewEntryInItemsTable(vo);
vo.setItemid(i.intValue());
logTime("doCreateNewEntryInItemsTable", timerStart, System.currentTimeMillis());
return vo;
}
/**
 * Ensures the items manage table exists, delegating to the DAO and timing the call.
 *
 * @param vo value object describing the items manage table
 * @return always true
 */
public boolean createItemsTableIfNot(ItemsVO vo) {
    logger.debug("JDBC::createItemsTableIfNot");
    long startedAt = System.currentTimeMillis();
    conf.getDBDAO().doCreateItemsTableIfNot(vo);
    logTime("doCreateItemsTableIfNot", startedAt, System.currentTimeMillis());
    return true;
}
/**
 * Removes the given entry from the items manage table, delegating to the DAO.
 *
 * @param vo value object identifying the row to delete
 * @return the unchanged vo
 */
public ItemsVO deleteItemsEntry(ItemsVO vo) {
    logger.debug("JDBC::deleteItemsEntry");
    long startedAt = System.currentTimeMillis();
    conf.getDBDAO().doDeleteItemsEntry(vo);
    logTime("deleteItemsEntry", startedAt, System.currentTimeMillis());
    return vo;
}
/**
 * Fetches all item id / item name rows from the items manage table via the DAO.
 *
 * @return list of item rows as reported by the DAO
 */
public List<ItemsVO> getItemIDTableNames() {
    logger.debug("JDBC::getItemIDTableNames");
    long startedAt = System.currentTimeMillis();
    List<ItemsVO> itemRows = conf.getDBDAO().doGetItemIDTableNames(new ItemsVO());
    logTime("getItemIDTableNames", startedAt, System.currentTimeMillis());
    return itemRows;
}
/**
 * Lists all item data tables known to the database, excluding the items manage table.
 *
 * @return list of table descriptors as reported by the DAO
 */
public List<ItemsVO> getItemTables() {
logger.debug("JDBC::getItemTables");
long timerStart = System.currentTimeMillis();
ItemsVO vo = new ItemsVO();
// Pass the cached database name so the DAO can scope the lookup
vo.setJdbcUriDatabaseName(conf.getDbName());
List<ItemsVO> vol = conf.getDBDAO().doGetItemTables(vo);
logTime("getItemTables", timerStart, System.currentTimeMillis());
return vol;
}
/****************
* MAPPERS ITEM *
****************/
/**
 * Renames item tables in the database as described by the given list, delegating to the DAO.
 *
 * @param vol list of table rename descriptors
 */
public void updateItemTableNames(List<ItemVO> vol) {
    logger.debug("JDBC::updateItemTableNames");
    long startedAt = System.currentTimeMillis();
    conf.getDBDAO().doUpdateItemTableNames(vol);
    logTime("updateItemTableNames", startedAt, System.currentTimeMillis());
}
/**
 * Creates the data table for a single item, delegating to the DAO.
 *
 * @param vo value object describing the table to create
 * @return the unchanged vo
 */
public ItemVO createItemTable(ItemVO vo) {
    logger.debug("JDBC::createItemTable");
    long startedAt = System.currentTimeMillis();
    conf.getDBDAO().doCreateItemTable(vo);
    logTime("createItemTable", startedAt, System.currentTimeMillis());
    return vo;
}
/**
 * Persists the item's current value into its table; the table is resolved (and, if
 * needed, created) via getTable().
 *
 * @param item the item to store
 * @return the same item, also when the table could not be resolved
 */
public Item storeItemValue(Item item) {
logger.debug("JDBC::storeItemValue: item={}", item.toString());
String tableName = getTable(item);
if (tableName == null) {
logger.error("JDBC::store: Unable to store item '{}'.", item.getName());
return item;
}
long timerStart = System.currentTimeMillis();
conf.getDBDAO().doStoreItemValue(item, new ItemVO(tableName, null));
logTime("storeItemValue", timerStart, System.currentTimeMillis());
// A successful store resets the reconnect error counter
errCnt = 0;
return item;
}
/**
 * Runs the history query for one item against its table.
 *
 * @param filter query filter (time range, ordering, paging)
 * @param numberDecimalcount decimal rounding for number items
 * @param table the item's table name; when null, no query is run
 * @param item the item to query
 * @return the query result, or null when table is null
 */
public List<HistoricItem> getHistItemFilterQuery(FilterCriteria filter, int numberDecimalcount, String table,
        Item item) {
    logger.debug(
            "JDBC::getHistItemFilterQuery filter='{}' numberDecimalcount='{}' table='{}' item='{}' itemName='{}'",
            (filter != null), numberDecimalcount, table, item, item.getName());
    if (table != null) {
        long timerStart = System.currentTimeMillis();
        List<HistoricItem> r = conf.getDBDAO().doGetHistItemFilterQuery(item, filter, numberDecimalcount, table,
                item.getName());
        // Fix: timing was previously logged under the wrong label "insertItemValue" (copy-paste error)
        logTime("getHistItemFilterQuery", timerStart, System.currentTimeMillis());
        return r;
    } else {
        logger.error("JDBC::getHistItemFilterQuery: TABLE is NULL; cannot get data from non-existent table.");
    }
    // NOTE(review): callers must handle a null return; kept for backward compatibility
    return null;
}
/***********************
* DATABASE CONNECTION *
***********************/
/**
 * Opens the Yank default connection pool if a driver is available and no connection
 * exists yet.
 *
 * @return true if a connection pool is (or becomes) available, false when no driver is present
 */
protected boolean openConnection() {
logger.debug("JDBC::openConnection isDriverAvailable: {}", conf.isDriverAvailable());
if (conf.isDriverAvailable() && !conf.isDbConnected()) {
logger.info("JDBC::openConnection: Driver is available::Yank setupDataSource");
Yank.setupDefaultConnectionPool(conf.getHikariConfiguration());
conf.setDbConnected(true);
return true;
} else if (!conf.isDriverAvailable()) {
logger.warn("JDBC::openConnection: no driver available!");
initialized = false;
return false;
}
// Driver available and pool already set up
return true;
}
/**
 * Releases the Yank default connection pool and marks the configuration as disconnected.
 */
protected void closeConnection() {
logger.debug("JDBC::closeConnection");
// Closes all open connection pools
Yank.releaseDefaultConnectionPool();
conf.setDbConnected(false);
}
/**
 * Checks whether the database can be used, pinging it up to two times when the
 * service is not yet initialized.
 *
 * NOTE(review): in the first branch `p` is already known to be true, so the leading
 * `p &&` there is redundant. Also, the condition returns false while
 * errCnt <= errReconnectThreshold (threshold > 0), which looks inverted relative to
 * the usual "fail after too many errors" semantics - confirm the intended reconnect
 * behavior before changing this logic.
 *
 * @return true if the database is considered accessible
 */
protected boolean checkDBAccessability() {
// Check if connection is valid
if (initialized) {
return true;
}
// first
boolean p = pingDB();
if (p) {
logger.debug("JDBC::checkDBAcessability, first try connection: {}", p);
return (p && !(conf.getErrReconnectThreshold() > 0 && errCnt <= conf.getErrReconnectThreshold()));
} else {
// second
p = pingDB();
logger.debug("JDBC::checkDBAcessability, second try connection: {}", p);
return (p && !(conf.getErrReconnectThreshold() > 0 && errCnt <= conf.getErrReconnectThreshold()));
}
}
/**************************
* DATABASE TABLEHANDLING *
**************************/
/**
 * Ensures the items index table exists and fills the item-name-to-table-name
 * cache, optionally rebuilding all table names first.
 */
protected void checkDBSchema() {
    // Create the items index table if it does not exist yet
    createItemsTableIfNot(new ItemsVO());
    if (conf.getRebuildTableNames()) {
        formatTableNames();
        logger.info(
                "JDBC::checkDBSchema: Rebuild complete, configure the 'rebuildTableNames' setting to 'false' to stop rebuilds on startup");
    } else {
        // Reset the error counter
        errCnt = 0;
        // Cache the table name for every known item id
        for (ItemsVO entry : getItemIDTableNames()) {
            sqlTables.put(entry.getItemname(), getTableName(entry.getItemid(), entry.getItemname()));
        }
    }
}
/**
 * Returns the database table name for the given item, creating the index
 * entry and the per-item table on first use.
 *
 * @param item the item to resolve a table for
 * @return the table name, or null if the name could not be created
 */
protected String getTable(Item item) {
    int rowId = 0;
    ItemsVO isvo;
    ItemVO ivo;
    String itemName = item.getName();
    String tableName = sqlTables.get(itemName);
    // Table already exists - return the name
    if (tableName != null) {
        return tableName;
    }
    logger.debug("JDBC::getTable: no table found for item '{}' in sqlTables", itemName);
    // Create a new entry in items table
    isvo = new ItemsVO();
    isvo.setItemname(itemName);
    isvo = createNewEntryInItemsTable(isvo);
    rowId = isvo.getItemid();
    // rowId 0 indicates the index insert did not yield an id; execution continues regardless
    if (rowId == 0) {
        logger.error("JDBC::getTable: Creating table for item '{}' failed.", itemName);
    }
    // Create the table name
    logger.debug("JDBC::getTable: getTableName with rowId={} itemName={}", rowId, itemName);
    tableName = getTableName(rowId, itemName);
    // An error occurred adding the item name into the index list!
    if (tableName == null) {
        logger.error("JDBC::getTable: tableName was null; could not create a table for item '{}'", itemName);
        return null;
    }
    // Create table for item, typed after the item's data type
    String dataType = conf.getDBDAO().getDataType(item);
    ivo = new ItemVO(tableName, itemName);
    ivo.setDbType(dataType);
    ivo = createItemTable(ivo);
    logger.debug("JDBC::getTable: Table created for item '{}' with dataType {} in SQL database.", itemName,
            dataType);
    sqlTables.put(itemName, tableName);
    // Check if the new entry is in the table list
    // If it's not in the list, then there was an error and we need to do
    // some tidying up
    // The item needs to be removed from the index table to avoid duplicates
    // NOTE(review): tableName is non-null here and was just put into sqlTables,
    // so this branch looks unreachable — confirm before removing
    if (sqlTables.get(itemName) == null) {
        logger.error("JDBC::getTable: Item '{}' was not added to the table - removing index", itemName);
        isvo = new ItemsVO();
        isvo.setItemname(itemName);
        deleteItemsEntry(isvo);
    }
    return tableName;
}
/**
 * Renames existing item tables to the currently configured naming scheme.
 * The item id is parsed out of each existing table name (either after the
 * configured prefix or after the last underscore) and mapped to the new name.
 * The service is temporarily marked uninitialized while the rename runs.
 */
private void formatTableNames() {
    // Remember and suspend the initialized state during the rebuild
    boolean tmpinit = initialized;
    if (tmpinit) {
        initialized = false;
    }
    List<ItemsVO> al;
    Map<Integer, String> tableIds = new HashMap<>();
    // Build the item-id -> new-table-name mapping from the index table
    al = getItemIDTableNames();
    for (int i = 0; i < al.size(); i++) {
        String t = getTableName(al.get(i).getItemid(), al.get(i).getItemname());
        sqlTables.put(al.get(i).getItemname(), t);
        tableIds.put(al.get(i).getItemid(), t);
    }
    // Walk over the physical tables and derive each one's item id from its name
    al = getItemTables();
    String oldName = "";
    String newName = "";
    List<ItemVO> oldNewTablenames = new ArrayList<>();
    for (int i = 0; i < al.size(); i++) {
        int id = -1;
        oldName = al.get(i).getTable_name();
        logger.info("JDBC::formatTableNames: found Table Name= {}", oldName);
        if (oldName.startsWith(conf.getTableNamePrefix()) && !oldName.contains("_")) {
            // e.g. "item0007" -> id 7
            // NOTE(review): Integer.parseInt throws NumberFormatException on a
            // non-numeric suffix — confirm callers tolerate that
            id = Integer.parseInt(oldName.substring(conf.getTableNamePrefix().length()));
            logger.info("JDBC::formatTableNames: found Table with Prefix '{}' Name= {} id= {}",
                    conf.getTableNamePrefix(), oldName, (id));
        } else if (oldName.contains("_")) {
            // e.g. "myitem_0007" -> id 7
            id = Integer.parseInt(oldName.substring(oldName.lastIndexOf("_") + 1));
            logger.info("JDBC::formatTableNames: found Table Name= {} id= {}", oldName, (id));
        }
        logger.info("JDBC::formatTableNames: found Table id= {}", id);
        newName = tableIds.get(id);
        logger.info("JDBC::formatTableNames: found Table newName= {}", newName);
        if (newName != null) {
            if (!oldName.equalsIgnoreCase(newName)) {
                oldNewTablenames.add(new ItemVO(oldName, newName));
                logger.info("JDBC::formatTableNames: Table '{}' will be renamed to '{}'", oldName, newName);
            } else {
                logger.info("JDBC::formatTableNames: Table oldName='{}' newName='{}' nothing to rename", oldName,
                        newName);
            }
        } else {
            // No index entry for this id: abort the whole rename batch
            logger.error("JDBC::formatTableNames: Table '{}' could NOT be renamed to '{}'", oldName, newName);
            break;
        }
    }
    updateItemTableNames(oldNewTablenames);
    logger.info("JDBC::formatTableNames: Finished updating {} item table names", oldNewTablenames.size());
    // Restore the previous initialized state
    initialized = tmpinit;
}
/**
 * Builds the full table name for an item from its prefix and the zero-padded row id.
 */
private String getTableName(int rowId, String itemName) {
    String paddedId = formatRight(rowId, conf.getTableIdDigitCount());
    return getTableNamePrefix(itemName) + paddedId;
}
/**
 * Determines the table name prefix: either the configured static prefix or,
 * when real item names are enabled, the sanitized lower-cased item name
 * followed by an underscore.
 */
private String getTableNamePrefix(String itemName) {
    if (!conf.getTableUseRealItemNames()) {
        return conf.getTableNamePrefix();
    }
    // Create the table name with real Item Names
    return (itemName.replaceAll(ITEM_NAME_PATTERN, "") + "_").toLowerCase();
}
/**
 * Returns the item info for every item that has a cached table mapping.
 */
public Set<PersistenceItemInfo> getItems() {
    // TODO: in general it would be possible to query the count, earliest and latest values for each item too but it
    // would be a very costly operation
    return sqlTables.keySet().stream().map(JdbcPersistenceItemInfo::new)
            .collect(Collectors.<PersistenceItemInfo> toUnmodifiableSet());
}
/**
 * Left-pads the string form of the given value with '0' characters up to the
 * requested length; values already at least that long are returned unchanged.
 *
 * @param value the value to format (converted via String.valueOf)
 * @param len the minimum length of the result
 * @return the zero-padded string representation of the value
 */
private static String formatRight(final Object value, final int len) {
    final String valueAsString = String.valueOf(value);
    if (valueAsString.length() >= len) {
        return valueAsString;
    }
    // StringBuilder instead of StringBuffer: no synchronization needed for a local
    final StringBuilder result = new StringBuilder(len);
    for (int i = len - valueAsString.length(); i > 0; i--) {
        result.append('0');
    }
    result.append(valueAsString);
    return result.toString();
}
/*****************
* H E L P E R S *
*****************/
/**
 * Records and logs timing statistics for a database operation.
 * Updates min/max access times, three moving averages and, every 1000
 * statements, the elapsed wall-clock seconds for that batch.
 *
 * @param me label of the measured operation, used in the log output
 * @param timerStart start time in epoch milliseconds
 * @param timerStop stop time in epoch milliseconds
 */
private void logTime(String me, long timerStart, long timerStop) {
    if (conf.enableLogTime && logger.isInfoEnabled()) {
        conf.timerCount++;
        int timerDiff = (int) (timerStop - timerStart);
        if (timerDiff < afterAccessMin) {
            afterAccessMin = timerDiff;
        }
        if (timerDiff > afterAccessMax) {
            afterAccessMax = timerDiff;
        }
        conf.timeAverage50arr.add(timerDiff);
        conf.timeAverage100arr.add(timerDiff);
        conf.timeAverage200arr.add(timerDiff);
        if (conf.timerCount == 1) {
            conf.timer1000 = System.currentTimeMillis();
        }
        if (conf.timerCount == 1001) {
            // Fix: divide as floating point before rounding; the previous integer
            // division truncated first, making Math.round a no-op
            conf.time1000Statements = Math.round((System.currentTimeMillis() - conf.timer1000) / 1000.0f);// Seconds
            conf.timerCount = 0;
        }
        logger.info(
                "JDBC::logTime: '{}':\n afterAccess = {} ms\n timeAverage50 = {} ms\n timeAverage100 = {} ms\n timeAverage200 = {} ms\n afterAccessMin = {} ms\n afterAccessMax = {} ms\n 1000Statements = {} sec\n statementCount = {}\n",
                me, timerDiff, conf.timeAverage50arr.getAverageInteger(),
                conf.timeAverage100arr.getAverageInteger(), conf.timeAverage200arr.getAverageInteger(),
                afterAccessMin, afterAccessMax, conf.time1000Statements, conf.timerCount);
    }
}
}

View File

@@ -0,0 +1,229 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.items.GroupItem;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemNotFoundException;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.persistence.PersistenceItemInfo;
import org.openhab.core.persistence.PersistenceService;
import org.openhab.core.persistence.QueryablePersistenceService;
import org.openhab.core.persistence.strategy.PersistenceStrategy;
import org.openhab.core.types.UnDefType;
import org.osgi.framework.BundleContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.ConfigurationPolicy;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is the implementation of the JDBC {@link PersistenceService}.
*
* @author Helmut Lehmeyer - Initial contribution
* @author Kai Kreuzer - Migration to 3.x
*/
@NonNullByDefault
@Component(service = { PersistenceService.class,
        QueryablePersistenceService.class }, configurationPid = "org.openhab.jdbc", configurationPolicy = ConfigurationPolicy.REQUIRE)
public class JdbcPersistenceService extends JdbcMapper implements QueryablePersistenceService {

    private final Logger logger = LoggerFactory.getLogger(JdbcPersistenceService.class);

    private final ItemRegistry itemRegistry;

    // NOTE(review): both the constructor and activate() carry @Activate — confirm
    // this is the intended SCR wiring
    @Activate
    public JdbcPersistenceService(final @Reference ItemRegistry itemRegistry) {
        this.itemRegistry = itemRegistry;
    }

    /**
     * Called by the SCR to activate the component with its configuration read
     * from CAS
     *
     * @param bundleContext BundleContext of the Bundle that defines this component
     * @param configuration Configuration properties for this component obtained from the
     *            ConfigAdmin service
     */
    @Activate
    public void activate(BundleContext bundleContext, Map<Object, Object> configuration) {
        logger.debug("JDBC::activate: persistence service activated");
        updateConfig(configuration);
    }

    /**
     * Called by the SCR to deactivate the component when either the
     * configuration is removed or mandatory references are no longer satisfied
     * or the component has simply been stopped.
     *
     * @param reason Reason code for the deactivation:<br>
     *            <ul>
     *            <li>0 Unspecified
     *            <li>1 The component was disabled
     *            <li>2 A reference became unsatisfied
     *            <li>3 A configuration was changed
     *            <li>4 A configuration was deleted
     *            <li>5 The component was disposed
     *            <li>6 The bundle was stopped
     *            </ul>
     */
    @Deactivate
    public void deactivate(final int reason) {
        logger.debug("JDBC::deactivate: persistence bundle stopping. Disconnecting from database. reason={}", reason);
        // closeConnection();
        initialized = false;
    }

    @Override
    public String getId() {
        // Fix: message previously referred to 'getName' although this is getId
        logger.debug("JDBC::getId: returning id 'jdbc' for queryable persistence service.");
        return "jdbc";
    }

    @Override
    public String getLabel(@Nullable Locale locale) {
        return "JDBC";
    }

    @Override
    public void store(Item item) {
        store(item, null);
    }

    /**
     * @{inheritDoc
     */
    @Override
    public void store(Item item, @Nullable String alias) {
        // Do not store undefined/uninitialised data
        if (item.getState() instanceof UnDefType) {
            logger.debug("JDBC::store: ignore Item '{}' because it is UnDefType", item.getName());
            return;
        }
        if (!checkDBAccessability()) {
            logger.warn(
                    "JDBC::store: No connection to database. Cannot persist item '{}'! Will retry connecting to database when error count:{} equals errReconnectThreshold:{}",
                    item, errCnt, conf.getErrReconnectThreshold());
            return;
        }
        long timerStart = System.currentTimeMillis();
        storeItemValue(item);
        logger.debug("JDBC: Stored item '{}' as '{}' in SQL database at {} in {} ms.", item.getName(),
                item.getState().toString(), (new java.util.Date()).toString(), System.currentTimeMillis() - timerStart);
    }

    @Override
    public Set<PersistenceItemInfo> getItemInfo() {
        return getItems();
    }

    /**
     * Queries the {@link PersistenceService} for data with a given filter
     * criteria
     *
     * @param filter the filter to apply to the query
     * @return a time series of items
     */
    @Override
    public Iterable<HistoricItem> query(FilterCriteria filter) {
        if (!checkDBAccessability()) {
            logger.warn("JDBC::query: database not connected, query aborted for item '{}'", filter.getItemName());
            return Collections.emptyList();
        }
        // Get the item name from the filter
        // Also get the Item object so we can determine the type
        Item item = null;
        String itemName = filter.getItemName();
        logger.debug("JDBC::query: item is {}", itemName);
        try {
            item = itemRegistry.getItem(itemName);
        } catch (ItemNotFoundException e1) {
            logger.error("JDBC::query: unable to get item for itemName: '{}'. Ignore and give up!", itemName);
            return Collections.emptyList();
        }
        if (item instanceof GroupItem) {
            // For Group Item is BaseItem needed to get correct Type of Value.
            item = GroupItem.class.cast(item).getBaseItem();
            logger.debug("JDBC::query: item is instanceof GroupItem '{}'", itemName);
            if (item == null) {
                logger.debug("JDBC::query: BaseItem of GroupItem is null. Ignore and give up!");
                return Collections.emptyList();
            }
            if (item instanceof GroupItem) {
                logger.debug("JDBC::query: BaseItem of GroupItem is a GroupItem too. Ignore and give up!");
                return Collections.emptyList();
            }
        }
        String table = sqlTables.get(itemName);
        if (table == null) {
            logger.warn(
                    "JDBC::query: unable to find table for query, no data in database for item '{}'. Current number of tables in the database: {}",
                    itemName, sqlTables.size());
            // if enabled, table will be created immediately
            logger.warn("JDBC::query: try to generate the table for item '{}'", itemName);
            table = getTable(item);
        }
        long timerStart = System.currentTimeMillis();
        // Fix: guard against a null result (e.g. when no table could be created)
        // instead of dereferencing it below; also drops a useless ArrayList allocation
        List<HistoricItem> items = getHistItemFilterQuery(filter, conf.getNumberDecimalcount(), table, item);
        if (items == null) {
            logger.warn("JDBC::query: no data could be retrieved for item '{}'", itemName);
            return Collections.emptyList();
        }
        logger.debug("JDBC::query: query for {} returned {} rows in {} ms", item.getName(), items.size(),
                System.currentTimeMillis() - timerStart);
        // Success
        errCnt = 0;
        return items;
    }

    /**
     * Rebuilds the configuration from the given properties and, when the
     * database is reachable, verifies the schema and marks the service initialized.
     *
     * @param configuration the configuration properties from ConfigAdmin
     */
    public void updateConfig(Map<Object, Object> configuration) {
        logger.debug("JDBC::updateConfig");
        conf = new JdbcConfiguration(configuration);
        if (conf.valid && checkDBAccessability()) {
            checkDBSchema();
            // connection has been established ... initialization completed!
            initialized = true;
        } else {
            initialized = false;
        }
        logger.debug("JDBC::updateConfig: configuration complete for service={}.", getId());
    }

    @Override
    public List<PersistenceStrategy> getDefaultStrategies() {
        return Collections.emptyList();
    }
}

View File

@@ -0,0 +1,167 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.model;
import java.io.Serializable;
import java.util.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the Item-data on the part of MyBatis/database.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class ItemVO implements Serializable {

    private final Logger logger = LoggerFactory.getLogger(ItemVO.class);

    private static final long serialVersionUID = 1871441039821454890L;

    // current table name and the name it should be renamed to (for rename operations)
    private String tableName;
    private String newTableName;
    // database column type and the corresponding JDBC/item/Java types
    private String dbType;
    private String jdbcType;
    private String itemType;
    private Class<?> javaType;
    // timestamp and value of one stored data point
    private Date time;
    private Object value;

    /**
     * Creates a value object for a (re)name operation.
     *
     * @param tableName the current table name
     * @param newTableName the target table name (may be null when no rename is intended)
     */
    public ItemVO(String tableName, String newTableName) {
        logger.debug("JDBC:ItemVO tableName={}; newTableName={}; ", tableName, newTableName);
        this.tableName = tableName;
        this.newTableName = newTableName;
    }

    public ItemVO() {
    }

    /**
     * Sets the database column type and the matching Java type in one call.
     */
    public void setValueTypes(String dbType, Class<?> javaType) {
        logger.debug("JDBC:ItemVO setValueTypes dbType={}; javaType={};", dbType, javaType);
        this.dbType = dbType;
        this.javaType = javaType;
    }

    public String getTableName() {
        return tableName;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public String getNewTableName() {
        return newTableName;
    }

    public void setNewTableName(String newTableName) {
        this.newTableName = newTableName;
    }

    public String getDbType() {
        return dbType;
    }

    public void setDbType(String dbType) {
        this.dbType = dbType;
    }

    public String getJdbcType() {
        return jdbcType;
    }

    public void setJdbcType(String jdbcType) {
        this.jdbcType = jdbcType;
    }

    public String getItemType() {
        return itemType;
    }

    public void setItemType(String itemType) {
        this.itemType = itemType;
    }

    // NOTE(review): throws NullPointerException when javaType was never set — confirm callers guarantee it
    public String getJavaType() {
        return javaType.getName();
    }

    public void setJavaType(Class<?> javaType) {
        this.javaType = javaType;
    }

    public Date getTime() {
        return time;
    }

    public void setTime(Date time) {
        this.time = time;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }

    /**
     * Fix: hashCode was missing although equals is overridden; it is now
     * consistent with equals (based on value and time).
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((time == null) ? 0 : time.hashCode());
        return result;
    }

    /**
     * Equality is based on value and time only, matching hashCode.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ItemVO other = (ItemVO) obj;
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        } else if (!value.equals(other.value)) {
            return false;
        }
        // Fix: time was previously compared by reference (!=), so two equal Date
        // instances were considered different; compare null-safely by value instead
        if (time == null) {
            if (other.time != null) {
                return false;
            }
        } else if (!time.equals(other.time)) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("ItemVO [tableName=");
        builder.append(tableName);
        builder.append(", newTableName=");
        builder.append(newTableName);
        builder.append(", dbType=");
        builder.append(dbType);
        builder.append(", javaType=");
        builder.append(javaType);
        builder.append(", time=");
        builder.append(time);
        builder.append(", value=");
        builder.append(value);
        builder.append("]");
        return builder.toString();
    }
}

View File

@@ -0,0 +1,154 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.model;
import java.io.Serializable;
/**
* Represents the table naming data.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class ItemsVO implements Serializable {
private static final long serialVersionUID = 2871961811177601520L;
private static final String STR_FILTER = "[^a-zA-Z0-9]";
private String coltype = "VARCHAR(500)";
private String colname = "itemname";
private String itemsManageTable = "items";
private int itemid;
private String itemname;
private String table_name;
private String jdbcUriDatabaseName;
public String getColtype() {
return coltype;
}
public void setColtype(String coltype) {
this.coltype = coltype.replaceAll(STR_FILTER, "");
}
public String getColname() {
return colname;
}
public void setColname(String colname) {
this.colname = colname.replaceAll(STR_FILTER, "");
}
public String getItemsManageTable() {
return itemsManageTable;
}
public void setItemsManageTable(String itemsManageTable) {
this.itemsManageTable = itemsManageTable.replaceAll(STR_FILTER, "");
}
public int getItemid() {
return itemid;
}
public void setItemid(int itemid) {
this.itemid = itemid;
}
public String getItemname() {
return itemname;
}
public void setItemname(String itemname) {
this.itemname = itemname;
}
public String getTable_name() {
return table_name;
}
public void setTable_name(String table_name) {
this.table_name = table_name;
}
public String getJdbcUriDatabaseName() {
return jdbcUriDatabaseName;
}
public void setJdbcUriDatabaseName(String jdbcUriDatabaseName) {
this.jdbcUriDatabaseName = jdbcUriDatabaseName;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((itemname == null) ? 0 : itemname.hashCode());
result = prime * result + (itemid ^ (itemid >>> 32));
return result;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ItemsVO other = (ItemsVO) obj;
if (itemname == null) {
if (other.itemname != null) {
return false;
}
} else if (!itemname.equals(other.itemname)) {
return false;
}
if (itemid != other.itemid) {
return false;
}
return true;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ItemsVO [coltype=");
builder.append(coltype);
builder.append(", colname=");
builder.append(colname);
builder.append(", itemsManageTable=");
builder.append(itemsManageTable);
builder.append(", itemid=");
builder.append(itemid);
builder.append(", itemname=");
builder.append(itemname);
builder.append(", table_name=");
builder.append(table_name);
builder.append("]");
return builder.toString();
}
}

View File

@@ -0,0 +1,64 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.model;
import java.time.ZonedDateTime;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
/**
* Represents the data on the part of openHAB.
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class JdbcHistoricItem implements HistoricItem {

    // immutable snapshot of one persisted data point
    private final String name;
    private final State state;
    private final ZonedDateTime timestamp;

    /**
     * Creates a historic item snapshot.
     *
     * @param name the item name
     * @param state the persisted state
     * @param timestamp the point in time the state was recorded
     */
    public JdbcHistoricItem(String name, State state, ZonedDateTime timestamp) {
        this.name = name;
        this.state = state;
        this.timestamp = timestamp;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public State getState() {
        return state;
    }

    @Override
    public ZonedDateTime getTimestamp() {
        return timestamp;
    }

    @Override
    public String toString() {
        return "JdbcItem [name=" + name + ", state=" + state + ", timestamp=" + timestamp + "]";
    }
}

View File

@@ -0,0 +1,65 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.model;
import java.util.Date;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.persistence.PersistenceItemInfo;
/**
* Represents the item info for openHAB.
*
* @author Christoph Weitkamp - Initial contribution
*/
@NonNullByDefault
public class JdbcPersistenceItemInfo implements PersistenceItemInfo {
    // item name this info belongs to
    private final String name;
    // number of persisted values; null when unknown
    private final @Nullable Integer count;
    // timestamp of the earliest persisted value; null when unknown
    private final @Nullable Date earliest;
    // timestamp of the latest persisted value; null when unknown
    private final @Nullable Date latest;

    /**
     * Creates an info object with only the item name; statistics stay unknown.
     */
    public JdbcPersistenceItemInfo(String name) {
        this(name, null, null, null);
    }

    /**
     * Creates an info object with full statistics.
     *
     * @param name the item name
     * @param count number of persisted values, or null when unknown
     * @param earliest earliest persisted timestamp, or null when unknown
     * @param latest latest persisted timestamp, or null when unknown
     */
    public JdbcPersistenceItemInfo(String name, @Nullable Integer count, @Nullable Date earliest,
            @Nullable Date latest) {
        this.name = name;
        this.count = count;
        this.earliest = earliest;
        this.latest = latest;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public @Nullable Integer getCount() {
        return count;
    }

    @Override
    public @Nullable Date getEarliest() {
        return earliest;
    }

    @Override
    public @Nullable Date getLatest() {
        return latest;
    }
}

View File

@@ -0,0 +1,126 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.utils;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.knowm.yank.Yank;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.zaxxer.hikari.HikariDataSource;
/**
* Meta data class
*
* @author Helmut Lehmeyer - Initial contribution
*/
public class DbMetaData {

    private final Logger logger = LoggerFactory.getLogger(DbMetaData.class);

    // database and driver version information, 0/null when it could not be determined
    private int dbMajorVersion;
    private int dbMinorVersion;
    private int driverMajorVersion;
    private int driverMinorVersion;
    private String dbProductName;
    private String dbProductVersion;

    /**
     * Reads database and driver metadata from the default connection pool.
     * Vendors that do not support individual metadata calls only leave the
     * corresponding field at its default.
     */
    public DbMetaData() {
        HikariDataSource h = Yank.getDefaultConnectionPool();
        // Fix: the borrowed connection was never closed and thus leaked from the
        // pool; try-with-resources returns it reliably
        try (Connection connection = h.getConnection()) {
            DatabaseMetaData meta = connection.getMetaData();
            // Oracle (and some other vendors) do not support
            // some the following methods; therefore, we need
            // to use try-catch block.
            try {
                dbMajorVersion = meta.getDatabaseMajorVersion();
                logger.debug("dbMajorVersion = '{}'", dbMajorVersion);
            } catch (Exception e) {
                logger.error("Asking for 'dbMajorVersion' is unsupported: '{}'", e.getMessage());
            }
            try {
                dbMinorVersion = meta.getDatabaseMinorVersion();
                logger.debug("dbMinorVersion = '{}'", dbMinorVersion);
            } catch (Exception e) {
                logger.error("Asking for 'dbMinorVersion' is unsupported: '{}'", e.getMessage());
            }
            driverMajorVersion = meta.getDriverMajorVersion();
            logger.debug("driverMajorVersion = '{}'", driverMajorVersion);
            driverMinorVersion = meta.getDriverMinorVersion();
            logger.debug("driverMinorVersion = '{}'", driverMinorVersion);
            dbProductName = meta.getDatabaseProductName();
            logger.debug("dbProductName = '{}'", dbProductName);
            dbProductVersion = meta.getDatabaseProductVersion();
            logger.debug("dbProductVersion = '{}'", dbProductVersion);
        } catch (SQLException e1) {
            // Fix: the previous message wrongly blamed 'dbMajorVersion' for any failure
            logger.error("Unable to read database metadata: '{}'", e1.getMessage());
        }
    }

    public int getDbMajorVersion() {
        return dbMajorVersion;
    }

    public int getDbMinorVersion() {
        return dbMinorVersion;
    }

    /**
     * @return true if the database version is strictly greater than major.minor
     */
    public boolean isDbVersionGreater(int major, int minor) {
        if (dbMajorVersion > major) {
            return true;
        } else if (dbMajorVersion == major) {
            if (dbMinorVersion > minor) {
                return true;
            }
        }
        return false;
    }

    public int getDriverMajorVersion() {
        return driverMajorVersion;
    }

    public int getDriverMinorVersion() {
        return driverMinorVersion;
    }

    /**
     * NOTE(review): unlike isDbVersionGreater, this returns true when the GIVEN
     * version is greater than the driver's (comparison is inverted) — confirm
     * callers rely on this semantics before renaming or changing it.
     */
    public boolean isDriverVersionGreater(int major, int minor) {
        if (major > driverMajorVersion) {
            return true;
        } else if (major == driverMajorVersion) {
            if (minor > driverMinorVersion) {
                return true;
            }
        }
        return false;
    }

    public String getDbProductName() {
        return dbProductName;
    }

    public String getDbProductVersion() {
        return dbProductVersion;
    }
}

View File

@@ -0,0 +1,74 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.utils;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.LinkedList;
import java.util.Queue;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
* Calculates the average/mean of a number series.
*
* @author Helmut Lehmeyer - Initial contribution
*/
@NonNullByDefault
public class MovingAverage {

    // sliding window of the most recent values and its running sum
    private final Queue<BigDecimal> win = new LinkedList<>();
    private final int period;
    private BigDecimal sum = BigDecimal.ZERO;

    /**
     * Creates a moving average over the given window size.
     *
     * @param period the window size; must be a positive integer
     */
    public MovingAverage(int period) {
        assert period > 0 : "Period must be a positive integer";
        this.period = period;
    }

    public void add(Double num) {
        // Fix: BigDecimal.valueOf uses the double's canonical string form;
        // new BigDecimal(double) used the exact binary expansion instead
        add(BigDecimal.valueOf(num));
    }

    public void add(Long num) {
        add(BigDecimal.valueOf(num));
    }

    public void add(Integer num) {
        add(BigDecimal.valueOf(num));
    }

    /**
     * Adds a value to the window, evicting the oldest value once the window is full.
     */
    public void add(BigDecimal num) {
        sum = sum.add(num);
        win.add(num);
        if (win.size() > period) {
            sum = sum.subtract(win.remove());
        }
    }

    /**
     * @return the average of the current window with 2 decimal places (HALF_UP),
     *         or ZERO for an empty window
     */
    public BigDecimal getAverage() {
        if (win.isEmpty()) {
            return BigDecimal.ZERO; // technically the average is undefined
        }
        BigDecimal divisor = BigDecimal.valueOf(win.size());
        return sum.divide(divisor, 2, RoundingMode.HALF_UP);
    }

    public double getAverageDouble() {
        return getAverage().doubleValue();
    }

    public int getAverageInteger() {
        return getAverage().intValue();
    }
}

View File

@@ -0,0 +1,279 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.jdbc.utils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.persistence.FilterCriteria;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility class
*
* @author Helmut Lehmeyer - Initial contribution
*/
@NonNullByDefault
public class StringUtilsExt {
private static final Logger LOGGER = LoggerFactory.getLogger(StringUtilsExt.class);
/**
 * Replaces successive matches of the given pattern with the corresponding
 * array entries, one occurrence per entry.
 *
 * @param str the string to perform the replacements on
 * @param separate regular expression matching the token to replace
 * @param separators replacement values (must be Strings), applied in order
 * @return the string with all replacements applied
 */
public static final String replaceArrayMerge(String str, String separate, Object[] separators) {
    String merged = str;
    for (Object replacement : separators) {
        merged = merged.replaceFirst(separate, (String) replacement);
    }
    return merged;
}
/**
 * Replaces the first match of each pattern in {@code separate} with the
 * replacement at the same index in {@code separators}.
 *
 * @see #replaceArrayMerge(String str, String separate, Object[] separators)
 */
public static final String replaceArrayMerge(String str, String[] separate, String[] separators) {
    String merged = str;
    for (int i = 0; i < separators.length; i++) {
        merged = merged.replaceFirst(separate[i], separators[i]);
    }
    return merged;
}
/**
 * Parses a JDBC URI without predefined properties.
 *
 * @see #parseJdbcURL(String url, Properties def)
 */
public static Properties parseJdbcURL(String url) {
    // Delegate with no predefined defaults
    return parseJdbcURL(url, null);
}
/**
 * <b>JDBC-URI Examples:</b><br/>
 * jdbc:dbShortcut:c:/dev/databaseName<br/>
 * jdbc:dbShortcut:scheme:c:/dev/databaseName<br/>
 * jdbc:dbShortcut:scheme:c:\\dev\\databaseName<br/>
 * jdbc:dbShortcut:./databaseName<br/>
 * jdbc:dbShortcut:/databaseName<br/>
 * jdbc:dbShortcut:~/databaseName<br/>
 * jdbc:dbShortcut:/path/databaseName.db<br/>
 * jdbc:dbShortcut:./../../path/databaseName<br/>
 * jdbc:dbShortcut:scheme:./path/../path/databaseName;param1=true;<br/>
 * jdbc:dbShortcut://192.168.0.145:3306/databaseName?param1=false&param2=true
 * <p/>
 *
 * @param url JDBC-URI
 * @param def Predefined Properties Object
 * @return A merged Properties Object may contain:<br/>
 *         parseValid (mandatory)<br/>
 *         scheme<br/>
 *         serverPath<br/>
 *         dbShortcut<br/>
 *         databaseName<br/>
 *         portNumber<br/>
 *         serverName<br/>
 *         pathQuery<br/>
 */
public static Properties parseJdbcURL(String url, @Nullable Properties def) {
    Properties props;
    if (def == null) {
        props = new Properties();
    } else {
        // use the given properties as defaults
        props = new Properties(def);
    }
    // too short to be a valid "jdbc:x:..." URI
    if (url == null || url.length() < 9) {
        return props;
    }
    // replace all \ with / so the URI parser accepts Windows paths
    if (url.contains("\\")) {
        url = url.replaceAll("\\\\", "/");
    }
    // normalize ;-style parameters to URI query syntax
    if (url.contains(";")) {
        // replace first ; with ?
        url = url.replaceFirst(";", "?");
        // replace other ; with &
        url = url.replaceAll(";", "&");
    }
    if (url.split(":").length < 3 || url.indexOf("/") == -1) {
        LOGGER.error("parseJdbcURL: URI '{}' is not well formated, expected uri like 'jdbc:dbShortcut:/path'", url);
        props.put("parseValid", "false");
        return props;
    }
    // split "jdbc:dbShortcut" off the front of the URI
    String[] protAndDb = stringBeforeSubstr(url, ":", 1).split(":");
    if (!"jdbc".equals(protAndDb[0])) {
        LOGGER.error("parseJdbcURL: URI '{}' is not well formated, expected suffix 'jdbc' found '{}'", url,
                protAndDb[0]);
        props.put("parseValid", "false");
        return props;
    }
    props.put("parseValid", "true");
    props.put("dbShortcut", protAndDb[1]);
    URI dbURI = null;
    try {
        dbURI = new URI(stringAfterSubstr(url, ":", 1).replaceFirst(" ", ""));
        // an extra scheme segment (e.g. "scheme:c:/...") shifts the real URI one colon further
        if (dbURI.getScheme() != null) {
            props.put("scheme", dbURI.getScheme());
            dbURI = new URI(stringAfterSubstr(url, ":", 2).replaceFirst(" ", ""));
        }
    } catch (URISyntaxException e) {
        LOGGER.error("parseJdbcURL: URI '{}' is not well formated.", url, e);
        return props;
    }
    // Query-Parameters: copy each key=value pair into the result
    if (dbURI.getQuery() != null) {
        String[] q = dbURI.getQuery().split("&");
        for (int i = 0; i < q.length; i++) {
            String[] t = q[i].split("=");
            props.put(t[0], t[1]);
        }
        props.put("pathQuery", dbURI.getQuery());
    }
    // derive the directory part ("serverPath") of a file-based database path
    String path = "";
    if (dbURI.getPath() != null) {
        String gp = dbURI.getPath();
        String st = "/";
        if (gp.indexOf("/") <= 1) {
            if (substrPos(gp, st).size() > 1) {
                path = stringBeforeLastSubstr(gp, st) + st;
            } else {
                path = stringBeforeSubstr(gp, st) + st;
            }
        }
        // a one-letter scheme is a Windows drive letter, re-prepend it
        if (dbURI.getScheme() != null && dbURI.getScheme().length() == 1) {
            path = dbURI.getScheme() + ":" + path;
        }
        props.put("serverPath", path);
    }
    // the last path segment is the database name
    if (dbURI.getPath() != null) {
        props.put("databaseName", stringAfterLastSubstr(dbURI.getPath(), "/"));
    }
    if (dbURI.getPort() != -1) {
        props.put("portNumber", dbURI.getPort() + "");
    }
    if (dbURI.getHost() != null) {
        props.put("serverName", dbURI.getHost());
    }
    return props;
}
/**
 * Returns the part of {@code s} that precedes the last occurrence of {@code substr}
 * (occurrences located via {@link #substrPos(String, String)}, i.e. case-insensitively).
 */
public static String stringBeforeLastSubstr(String s, String substr) {
    List<Integer> hits = substrPos(s, substr);
    int lastHit = hits.get(hits.size() - 1);
    return s.substring(0, lastHit);
}
/**
 * Returns the part of {@code s} that follows the last occurrence of {@code substr}
 * (occurrences located via {@link #substrPos(String, String)}, i.e. case-insensitively).
 */
public static String stringAfterLastSubstr(String s, String substr) {
    List<Integer> hits = substrPos(s, substr);
    int lastHit = hits.get(hits.size() - 1);
    return s.substring(lastHit + 1);
}
/**
 * Returns the part of {@code s} that follows the first occurrence of {@code substr}.
 * NOTE(review): advances exactly one character past the match start, so for a
 * multi-character {@code substr} the remainder of the match is kept — confirm callers
 * only pass single-character separators.
 */
public static String stringAfterSubstr(String s, String substr) {
    int first = s.indexOf(substr);
    return s.substring(first + 1);
}
/**
 * Returns the part of {@code s} that follows the n-th (0-based) occurrence of {@code substr}.
 * NOTE(review): like the two-argument overload, only one character past the match start
 * is skipped.
 */
public static String stringAfterSubstr(String s, String substr, int n) {
    int nth = substrPos(s, substr).get(n);
    return s.substring(nth + 1);
}
/**
 * Returns the part of {@code s} that precedes the first occurrence of {@code substr}.
 */
public static String stringBeforeSubstr(String s, String substr) {
    int first = s.indexOf(substr);
    return s.substring(0, first);
}
/**
 * Returns the part of {@code s} that precedes the n-th (0-based) occurrence of {@code substr}.
 */
public static String stringBeforeSubstr(String s, String substr, int n) {
    int nth = substrPos(s, substr).get(n);
    return s.substring(0, nth);
}
/**
 * Returns the indices of every occurrence of {@code substr} in {@code s},
 * matched case-insensitively.
 */
public static List<Integer> substrPos(String s, String substr) {
    // case-insensitive matching by default
    return substrPos(s, substr, true);
}
/**
 * Returns the indices of every occurrence of {@code substr} in {@code s}.
 *
 * @param ignoreCase whether matching is case-insensitive
 */
public static List<Integer> substrPos(String s, String substr, boolean ignoreCase) {
    List<Integer> hits = new ArrayList<>();
    final int window = substr.length();
    final int lastStart = s.length() - window;
    // slide a window of substr's length over s; overlapping matches are all reported
    for (int pos = 0; pos <= lastStart; pos++) {
        if (s.regionMatches(ignoreCase, pos, substr, 0, window)) {
            hits.add(pos);
        }
    }
    return hits;
}
/**
 * Renders a {@link FilterCriteria} as a single-line, human-readable string
 * (itemName, date range, paging, operator, ordering and state) for logging.
 */
public static String filterToString(FilterCriteria filter) {
    return "FilterCriteria [itemName=" + filter.getItemName() //
            + ", beginDate=" + filter.getBeginDate() //
            + ", endDate=" + filter.getEndDate() //
            + ", pageNumber=" + filter.getPageNumber() //
            + ", pageSize=" + filter.getPageSize() //
            + ", operator=" + filter.getOperator() //
            + ", ordering=" + filter.getOrdering() //
            + ", state=" + filter.getState() + "]";
}
}

View File

@@ -0,0 +1,209 @@
<?xml version="1.0" encoding="UTF-8"?>
<config-description:config-descriptions
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:config-description="https://openhab.org/schemas/config-description/v1.0.0"
xsi:schemaLocation="https://openhab.org/schemas/config-description/v1.0.0
https://openhab.org/schemas/config-description-1.0.0.xsd">
<config-description uri="persistence:jdbc">
<!--
# I N S T A L L J D B C P E R S I S T E N C E S E R V I C E
#
# https://github.com/openhab/openhab/wiki/JDBC-Persistence
#
# Tested databases/url-prefix: jdbc:derby, jdbc:h2, jdbc:hsqldb, jdbc:mariadb, jdbc:mysql, jdbc:postgresql, jdbc:sqlite
#
# derby, h2, hsqldb, sqlite can be embedded,
			# If no database is available it will be created, for example the url 'jdbc:h2:./testH2' creates a new DB in the openHAB folder.
#
# Create new database, for example on a MySQL-Server use:
# CREATE DATABASE 'yourDB' CHARACTER SET utf8 COLLATE utf8_general_ci;
-->
<!--
# D A T A B A S E C O N F I G
# Some URL-Examples, 'service' identifies and activates internally the correct jdbc driver.
# required database url like 'jdbc:<service>:<host>[:<port>;<attributes>]'
# jdbc:url=jdbc:derby:./testDerby;create=true
# jdbc:url=jdbc:h2:./testH2
# jdbc:url=jdbc:hsqldb:./testHsqlDb
# jdbc:url=jdbc:mariadb://192.168.0.1:3306/testMariadb
# jdbc:url=jdbc:mysql://192.168.0.1:3306/testMysql
# jdbc:url=jdbc:postgresql://192.168.0.1:5432/testPostgresql
# jdbc:url=jdbc:sqlite:./testSqlite.db
-->
<parameter name="url" type="text" required="true">
<label>Database URL</label>
<description><![CDATA[Defines required database URL and optional path and parameters.<br>
Required database url like 'jdbc:<service>:<host>[:<port>;<attributes>]'<br>
Parameter 'service' is used as identifier for the selected jdbc driver.
URL-Examples:<br>
jdbc:derby:./testDerby;create=true<br>
jdbc:h2:./testH2<br>
jdbc:hsqldb:./testHsqlDb<br>
jdbc:mariadb://192.168.0.1:3306/testMariadb<br>
jdbc:mysql://192.168.0.1:3306/testMysql<br>
jdbc:postgresql://192.168.0.1:5432/testPostgresql<br>
jdbc:sqlite:./testSqlite.db]]></description>
</parameter>
<parameter name="user" type="text" required="false">
<label>Database User</label>
<description><![CDATA[Defines optional database user.]]></description>
</parameter>
<parameter name="password" type="text" required="false">
<label>Database Password</label>
<description><![CDATA[Defines optional database password.]]></description>
</parameter>
<!--
# I T E M O P E R A T I O N S
# optional tweaking SQL datatypes
# see: https://mybatis.github.io/mybatis-3/apidocs/reference/org/apache/ibatis/type/JdbcType.html
# see: http://www.h2database.com/html/datatypes.html
# see: http://www.postgresql.org/docs/9.3/static/datatype.html
# defaults:
#sqltype.CALL = VARCHAR(200)
#sqltype.COLOR = VARCHAR(70)
#sqltype.CONTACT = VARCHAR(6)
#sqltype.DATETIME = DATETIME
#sqltype.DIMMER = TINYINT
#sqltype.LOCATION = VARCHAR(30)
#sqltype.NUMBER = DOUBLE
#sqltype.ROLLERSHUTTER = TINYINT
#sqltype.STRING = VARCHAR(65500)
#sqltype.SWITCH = VARCHAR(6)
# For Itemtype "Number" default decimal digit count (optional, default: 3)
#numberDecimalcount=
-->
<parameter name="sqltype.CALL" type="text" required="false">
<label>SqlType CALL</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for CALL <br>(optional, default: "VARCHAR(200)"). <br>
General about JdbcTypes/SqlTypes see: https://mybatis.github.io/mybatis-3/apidocs/reference/org/apache/ibatis/type/JdbcType.html <br>
see: http://www.h2database.com/html/datatypes.html <br>
see: http://www.postgresql.org/docs/9.5/static/datatype.html]]></description>
</parameter>
<parameter name="sqltype.COLOR" type="text" required="false">
<label>SqlType COLOR</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for COLOR <br>(optional, default: "VARCHAR(70)").]]></description>
</parameter>
<parameter name="sqltype.CONTACT" type="text" required="false">
<label>SqlType CONTACT</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for CONTACT <br>(optional, default: "VARCHAR(6)").]]></description>
</parameter>
<parameter name="sqltype.DATETIME" type="text" required="false">
<label>SqlType DATETIME</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for DATETIME <br>(optional, default: "DATETIME").]]></description>
</parameter>
<parameter name="sqltype.DIMMER" type="text" required="false">
<label>SqlType DIMMER</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for DIMMER <br>(optional, default: "TINYINT").]]></description>
</parameter>
<parameter name="sqltype.LOCATION" type="text" required="false">
<label>SqlType LOCATION</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for LOCATION <br>(optional, default: "VARCHAR(30)").]]></description>
</parameter>
<parameter name="sqltype.NUMBER" type="text" required="false">
<label>SqlType NUMBER</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for NUMBER <br>(optional, default: "DOUBLE").]]></description>
</parameter>
<parameter name="sqltype.ROLLERSHUTTER" type="text" required="false">
<label>SqlType ROLLERSHUTTER</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for ROLLERSHUTTER <br>(optional, default: "TINYINT").]]></description>
</parameter>
<parameter name="sqltype.STRING" type="text" required="false">
<label>SqlType STRING</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for STRING <br>(optional, default: "VARCHAR(65500)").]]></description>
</parameter>
<parameter name="sqltype.SWITCH" type="text" required="false">
<label>SqlType SWITCH</label>
<description><![CDATA[Overrides used JDBC/SQL datatype for SWITCH <br>(optional, default: "VARCHAR(6)").]]></description>
</parameter>
<!--
# T A B L E O P E R A T I O N S
# Tablename Prefix String (optional, default: "item")
# for Migration from MYSQL-Bundle set to 'Item'.
#tableNamePrefix=Item
# Tablename Prefix generation, using Item real names or "item" (optional, default: false -> "item")
# If true, 'tableNamePrefix' is ignored.
#tableUseRealItemNames=
			#tableUseRealItemNames=true
# Tablename Suffix length (optional, default: 4 -> 0001-9999)
# for Migration from MYSQL-Bundle set to 0.
#tableIdDigitCount=
# Rename existing Tables using tableUseRealItemNames and tableIdDigitCount (optional, default: false)
# USE WITH CARE! Deactivate after Renaming is done!
#rebuildTableNames=true
-->
<parameter name="tableNamePrefix" type="text" required="false">
<label>Tablename Prefix String</label>
<description><![CDATA[Tablename prefix string <br>(optional, default: "item"). <br>
For migration from MYSQL-Bundle set to 'Item'.]]></description>
</parameter>
<parameter name="tableUseRealItemNames" type="text" required="false">
<label>Tablename Realname Generation</label>
<description><![CDATA[Enables Tablename prefix generation per Items realname <br>(optional, default: disabled -> "Tablename Prefix String" is used). <br>
If true, 'Tablename Prefix String' is ignored.]]></description>
<options>
<option value="true">Enable</option>
<option value="false">Disable</option>
</options>
</parameter>
<parameter name="tableIdDigitCount" type="text" required="false">
<label>Tablename Suffix ID Count</label>
<description><![CDATA[Tablename Suffix ID Count <br>(optional, default: 4 -> 0001-9999). <br>
For migration from MYSQL-Bundle set to 0.]]></description>
</parameter>
<parameter name="rebuildTableNames" type="text" required="false">
<label>Tablename Rebuild</label>
<description><![CDATA[Rename existing tables using 'Tablename Realname Generation' and 'Tablename Suffix ID Count', (optional, default: disabled). <br>
USE WITH CARE! Deactivate after renaming is done!]]></description>
<options>
<option value="true">Enable</option>
<option value="false">Disable</option>
</options>
</parameter>
<!--
# D A T A B A S E C O N N E C T I O N S
			# Some embedded databases can handle only one connection (optional, default: configured per database in package org.openhab.persistence.jdbc.db.*)
# see: https://github.com/brettwooldridge/HikariCP/issues/256
# maximumPoolSize = 1
# minimumIdle = 1
-->
<parameter name="maximumPoolSize" type="text" required="false">
<label>Connections Max Pool Size</label>
<description><![CDATA[Overrides max pool size in database connection. <br>(optional, default: differs each Database)<br>
https://github.com/brettwooldridge/HikariCP/issues/256]]></description>
</parameter>
<parameter name="minimumIdle" type="text" required="false">
<label>Connections Min Idle</label>
<description><![CDATA[Overrides min idle database connections. <br>(optional, default: differs each Database)<br>
https://github.com/brettwooldridge/HikariCP/issues/256]]></description>
</parameter>
<!--
# T I M E K E E P I N G
# (optional, default: false)
#enableLogTime=true
-->
<parameter name="enableLogTime" type="text" required="false">
<label>Timekeeping Enable</label>
<description><![CDATA[Enables a time, performance measurement. <br>(optional, default: disabled)]]></description>
<options>
<option value="true">Enable</option>
<option value="false">Disable</option>
</options>
</parameter>
</config-description>
</config-description:config-descriptions>