Codebase as of c53e4aed26 as an initial commit for the shrunk repo

Signed-off-by: Kai Kreuzer <kai@openhab.org>
This commit is contained in:
Kai Kreuzer
2010-02-20 19:23:32 +01:00
committed by Kai Kreuzer
commit bbf1a7fd29
302 changed files with 29726 additions and 0 deletions

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="test" value="true"/>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.openhab.persistence.dynamodb</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

View File

@@ -0,0 +1,13 @@
This content is produced and maintained by the openHAB project.
* Project home: https://www.openhab.org
== Declared Project Licenses
This program and the accompanying materials are made available under the terms
of the Eclipse Public License 2.0 which is available at
https://www.eclipse.org/legal/epl-2.0/.
== Source Code
https://github.com/openhab/openhab-addons

View File

@@ -0,0 +1,174 @@
# Amazon DynamoDB Persistence
This service allows you to persist state updates using the [Amazon DynamoDB](https://aws.amazon.com/dynamodb/) database.
Query functionality is also fully supported.
Features:
* Writing/reading item states to/from the Amazon DynamoDB (NoSQL) database
* Configurable database table names
* Automatic table creation
## Disclaimer
This service is provided "AS IS", and the user takes full responsibility for any charges or damage to Amazon data.
## Table of Contents
<!-- Using MarkdownTOC plugin for Sublime Text to update the table of contents (TOC) -->
<!-- MarkdownTOC depth=3 autolink=true bracket=round -->
- [Prerequisites](#prerequisites)
- [Setting Up an Amazon Account](#setting-up-an-amazon-account)
- [Configuration](#configuration)
- [Basic configuration](#basic-configuration)
- [Configuration Using Credentials File](#configuration-using-credentials-file)
- [Advanced Configuration](#advanced-configuration)
- [Details](#details)
- [Tables Creation](#tables-creation)
- [Caveats](#caveats)
- [Developer Notes](#developer-notes)
- [Updating Amazon SDK](#updating-amazon-sdk)
<!-- /MarkdownTOC -->
## Prerequisites
You must first set up an Amazon account as described below.
Users are recommended to familiarize themselves with AWS pricing before using this service.
Please note that there might be charges from Amazon when using this service to query/store data to DynamoDB.
See [Amazon DynamoDB pricing pages](https://aws.amazon.com/dynamodb/pricing/) for more details.
Please also note possible [Free Tier](https://aws.amazon.com/free/) benefits.
### Setting Up an Amazon Account
* [Sign up](https://aws.amazon.com/) for Amazon AWS.
* Select the AWS region in the [AWS console](https://console.aws.amazon.com/) using [these instructions](https://docs.aws.amazon.com/awsconsolehelpdocs/latest/gsg/getting-started.html#select-region). Note the region identifier in the URL (e.g. `https://eu-west-1.console.aws.amazon.com/console/home?region=eu-west-1` means that region id is `eu-west-1`).
* **Create user for openHAB with IAM**
* Open Services -> IAM -> Users -> Create new Users. Enter `openhab` to _User names_, keep _Generate an access key for each user_ checked, and finally click _Create_.
* _Show User Security Credentials_ and record the keys displayed
* **Configure user policy to have access for dynamodb**
* Open Services -> IAM -> Policies
* Check _AmazonDynamoDBFullAccess_ and click _Policy actions_ -> _Attach_
* Check the user created in step 2 and click _Attach policy_
## Configuration
This service can be configured in the file `services/dynamodb.cfg`.
### Basic configuration
| Property | Default | Required | Description |
| --------- | ------- | :------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| accessKey | | Yes | access key as shown in [Setting up Amazon account](#setting-up-an-amazon-account). |
| secretKey | | Yes | secret key as shown in [Setting up Amazon account](#setting-up-an-amazon-account). |
| region | | Yes | AWS region ID as described in [Setting up Amazon account](#setting-up-an-amazon-account). The region needs to match the region that was used to create the user. |
### Configuration Using Credentials File
Alternatively, instead of specifying `accessKey` and `secretKey`, one can configure a configuration profile file.
| Property | Default | Required | Description |
| ------------------ | ------- | :------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| profilesConfigFile | | Yes | path to the credentials file. For example, `/etc/openhab2/aws_creds`. Please note that the user that runs openHAB must have appropriate read rights to the credential file. For more details on the Amazon credential file format, see [Amazon documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html). |
| profile | | Yes | name of the profile to use |
| region | | Yes | AWS region ID as described in Step 2 in [Setting up Amazon account](#setting-up-an-amazon-account). The region needs to match the region that was used to create the user. |
Example of service configuration file (`services/dynamodb.cfg`):
```ini
profilesConfigFile=/etc/openhab2/aws_creds
profile=fooprofile
region=eu-west-1
```
Example of credentials file (`/etc/openhab2/aws_creds`):
````ini
[fooprofile]
aws_access_key_id=testAccessKey
aws_secret_access_key=testSecretKey
````
### Advanced Configuration
In addition to the configuration properties above, the following are also available:
| Property | Default | Required | Description |
| -------------------------- | ---------- | :------: | -------------------------------------------------------------------------------------------------- |
| readCapacityUnits | 1 | No | read capacity for the created tables |
| writeCapacityUnits | 1 | No | write capacity for the created tables |
| tablePrefix | `openhab-` | No | table prefix used in the name of created tables |
| bufferCommitIntervalMillis | 1000 | No | Interval to commit (write) buffered data. In milliseconds. |
| bufferSize | 1000 | No | Internal buffer size in datapoints which is used to batch writes to DynamoDB every `bufferCommitIntervalMillis`. |
Typically you should not need to modify parameters related to buffering.
Refer to Amazon documentation on [provisioned throughput](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ProvisionedThroughput.html) for details on read/write capacity.
All item- and event-related configuration is done in the file `persistence/dynamodb.persist`.
## Details
### Tables Creation
When an item is persisted via this service, a table is created (if necessary).
Currently, the service will create at most two tables for different item types.
The tables will be named `<tablePrefix><item-type>`, where the `<item-type>` is either `bigdecimal` (numeric items) or `string` (string and complex items).
Each table will have three columns: `itemname` (item name), `timeutc` (in ISO 8601 format with millisecond accuracy), and `itemstate` (either a number or string representing item state).
### Buffering
By default, the service is asynchronous which means that data is not written immediately to DynamoDB but instead buffered in-memory.
The size of the buffer, in terms of datapoints, can be configured with `bufferSize`.
Every `bufferCommitIntervalMillis` the whole buffer of data is flushed to DynamoDB.
It is recommended to have the buffering enabled since the synchronous behaviour (writing data immediately) might have an adverse impact on the whole system when many items are persisted at the same time.
The buffering can be disabled by setting `bufferSize` to zero.
The defaults should be suitable in many use cases.
### Caveats
When the tables are created, the read/write capacity is configured according to configuration.
However, the service does not modify the capacity of existing tables.
As a workaround, you can modify the read/write capacity of existing tables using the [Amazon console](https://aws.amazon.com/console/).
## Developer Notes
### Updating Amazon SDK
1. Clean `lib/*`
2. Update SDK version in `scripts/fetch_sdk_pom.xml`. You can use the [maven online repository browser](https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-dynamodb) to find the latest version available online.
3. `scripts/fetch_sdk.sh`
4. Copy `scripts/target/site/dependencies.html` and `scripts/target/dependency/*.jar` to `lib/`
5. Generate `build.properties` entries
`ls lib/*.jar | python -c "import sys; print(' ' + ',\\\\\\n '.join(map(str.strip, sys.stdin.readlines())))"`
6. Generate `META-INF/MANIFEST.MF` `Bundle-ClassPath` entries
`ls lib/*.jar | python -c "import sys; print(' ' + ',\\n '.join(map(str.strip, sys.stdin.readlines())))"`
7. Generate `.classpath` entries
`ls lib/*.jar | python -c "import sys;pre='<classpathentry exported=\"true\" kind=\"lib\" path=\"';post='\"/>'; print('\\t' + pre + (post + '\\n\\t' + pre).join(map(str.strip, sys.stdin.readlines())) + post)"`
After these changes, it's good practice to run integration tests (against live AWS DynamoDB) in `org.openhab.persistence.dynamodb.test` bundle.
See README.md in the test bundle for more information on how to execute the tests.
### Running integration tests
To run integration tests, one needs to provide AWS credentials.
Eclipse instructions
1. Run all tests (in package org.openhab.persistence.dynamodb.internal) as JUnit Tests
2. Configure the run configuration, and open Arguments sheet
3. In VM arguments, provide the credentials for AWS
````
-DDYNAMODBTEST_REGION=REGION-ID
-DDYNAMODBTEST_ACCESS=ACCESS-KEY
-DDYNAMODBTEST_SECRET=SECRET
````
The tests will create tables with prefix `dynamodb-integration-tests-`.
Note that when tests are begun, all data is removed from that table!

View File

@@ -0,0 +1,115 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.openhab.addons.bundles</groupId>
<artifactId>org.openhab.addons.reactor.bundles</artifactId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<artifactId>org.openhab.persistence.dynamodb</artifactId>
<name>openHAB Add-ons :: Bundles :: Persistence Service :: DynamoDB</name>
<properties>
<bnd.importpackage>!com.amazonaws.*,!org.joda.convert.*,!com.sun.org.apache.xpath.*,!kotlin,!org.apache.log.*,!org.bouncycastle.*,!org.apache.avalon.*</bnd.importpackage>
</properties>
<dependencies>
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-core -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-core</artifactId>
<version>1.11.213</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-dynamodb</artifactId>
<version>1.11.213</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-kms -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kms</artifactId>
<version>1.11.213</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-s3 -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.11.213</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/jmespath-java -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>jmespath-java</artifactId>
<version>1.11.213</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/software.amazon.ion/ion-java -->
<dependency>
<groupId>software.amazon.ion</groupId>
<artifactId>ion-java</artifactId>
<version>1.0.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-logging/commons-logging -->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.9</version>
</dependency>
<!-- https://mvnrepository.com/artifact/joda-time/joda-time -->
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.8.1</version>
</dependency>
<!-- The following dependencies are required for test resolution -->
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.6.7</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.6.7.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-cbor -->
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-cbor</artifactId>
<version>2.6.7</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Fetches the AWS SDK jars declared in fetch_sdk_pom.xml and generates a
# dependency report, used when updating the bundled Amazon SDK (see README).
# Resolve the directory containing this script, regardless of caller's cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Quote $DIR so paths containing spaces do not break the invocation.
mvn -f "$DIR/fetch_sdk_pom.xml" clean process-sources project-info-reports:dependencies
echo "Check $DIR/target/site/dependencies.html and $DIR/target/dependency"

View File

@@ -0,0 +1,37 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>groupId</groupId>
<artifactId>artifactId</artifactId>
<version>1.0</version>
<dependencies>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-dynamodb</artifactId>
<version>1.11.213</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<phase>process-sources</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${targetdirectory}</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<features name="org.openhab.persistence.dynamodb-${project.version}" xmlns="http://karaf.apache.org/xmlns/features/v1.4.0">
<repository>mvn:org.openhab.core.features.karaf/org.openhab.core.features.karaf.openhab-core/${ohc.version}/xml/features</repository>
<feature name="openhab-persistence-dynamodb" description="DynamoDB Persistence" version="${project.version}">
<feature>openhab-runtime-base</feature>
<bundle start-level="80">mvn:org.openhab.addons.bundles/org.openhab.persistence.dynamodb/${project.version}</bundle>
<configfile finalname="${openhab.conf}/services/dynamodb.cfg" override="false">mvn:${project.groupId}/openhab-addons-external3/${project.version}/cfg/dynamodb</configfile>
</feature>
</features>

View File

@@ -0,0 +1,129 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.items.Item;
import org.openhab.core.persistence.PersistenceService;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Abstract base class for persistence services that buffer writes in memory
 * before committing them to the backing store.
 *
 * Subclasses provide conversion from openHAB state to the storage
 * representation ({@link #persistenceItemFromState}), a readiness check
 * ({@link #isReadyToStore}) and the actual flush ({@link #flushBufferedData}).
 * With a buffer size of zero, every {@code store()} call writes synchronously.
 *
 * @param <T> Type of the state as accepted by the AWS SDK.
 *
 * @author Sami Salonen - Initial contribution
 * @author Kai Kreuzer - Migration to 3.x
 *
 */
@NonNullByDefault
public abstract class AbstractBufferedPersistenceService<T> implements PersistenceService {

    /** Maximum time to wait when offering an item to an already-full buffer. */
    private static final long BUFFER_OFFER_TIMEOUT_MILLIS = 500;

    private final Logger logger = LoggerFactory.getLogger(AbstractBufferedPersistenceService.class);

    /** In-memory queue of pending writes; initialized via {@link #resetWithBufferSize(int)}. */
    protected @Nullable BlockingQueue<T> buffer;

    /** True when bufferSize == 0, i.e. data is flushed synchronously on every store() call. */
    private boolean writeImmediately;

    /**
     * (Re)creates the internal buffer.
     *
     * @param bufferSize requested capacity in datapoints; zero enables
     *            write-immediately mode (the queue still has capacity 1 so that
     *            flushBufferedData has something to drain).
     */
    protected void resetWithBufferSize(int bufferSize) {
        int capacity = Math.max(1, bufferSize);
        buffer = new ArrayBlockingQueue<>(capacity, true);
        writeImmediately = bufferSize == 0;
    }

    /**
     * Converts an openHAB state to the storage representation.
     *
     * @param name item (or alias) name
     * @param state state to persist
     * @param time timestamp of the state
     * @return storage representation of the datapoint
     */
    protected abstract T persistenceItemFromState(String name, State state, ZonedDateTime time);

    /** @return whether the service is ready to accept data (e.g. connected). */
    protected abstract boolean isReadyToStore();

    /** Writes all currently buffered datapoints to the backing store. */
    protected abstract void flushBufferedData();

    @Override
    public void store(Item item) {
        store(item, null);
    }

    /**
     * Buffers the item's current state for persistence (or writes it
     * immediately when write-immediately mode is active). Undefined states are
     * silently skipped.
     *
     * @param item item whose state is persisted
     * @param alias optional alias used instead of the item name
     * @throws IllegalStateException if the buffer was never initialized with
     *             {@link #resetWithBufferSize(int)}
     */
    @Override
    public void store(Item item, @Nullable String alias) {
        long storeStart = System.currentTimeMillis();
        // uuid is only used to correlate log lines of a single store() call
        String uuid = UUID.randomUUID().toString();
        if (item.getState() instanceof UnDefType) {
            logger.debug("Undefined item state received. Not storing item {}.", item.getName());
            return;
        }
        if (!isReadyToStore()) {
            return;
        }
        if (buffer == null) {
            throw new IllegalStateException("Buffer not initialized with resetWithBufferSize. Bug?");
        }
        ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(storeStart), ZoneId.systemDefault());
        String realName = item.getName();
        String name = (alias != null) ? alias : realName;
        State state = item.getState();
        T persistenceItem = persistenceItemFromState(name, state, time);
        logger.trace("store() called with item {}, which was converted to {} [{}]", item, persistenceItem, uuid);
        if (writeImmediately) {
            logger.debug("Writing immediately item {} [{}]", realName, uuid);
            // We want to write everything immediately
            // Synchronous behavior to ensure buffer does not get full.
            synchronized (this) {
                boolean buffered = addToBuffer(persistenceItem);
                assert buffered;
                flushBufferedData();
            }
        } else {
            long bufferStart = System.currentTimeMillis();
            boolean buffered = addToBuffer(persistenceItem);
            if (buffered) {
                logger.debug("Buffered item {} in {} ms. Total time for store(): {} [{}]", realName,
                        System.currentTimeMillis() - bufferStart, System.currentTimeMillis() - storeStart, uuid);
            } else {
                logger.debug(
                        "Buffer is full. Writing buffered data immediately and trying again. Consider increasing bufferSize");
                // Buffer is full, commit it immediately
                flushBufferedData();
                boolean buffered2 = addToBuffer(persistenceItem);
                if (buffered2) {
                    logger.debug("Buffered item in {} ms (2nd try, flushed buffer in-between) [{}]",
                            System.currentTimeMillis() - bufferStart, uuid);
                } else {
                    // The unlikely case happened -- buffer got full again immediately
                    logger.warn("Buffering failed for the second time -- Too small bufferSize? Discarding data [{}]",
                            uuid);
                }
            }
        }
    }

    /**
     * Offers the item to the buffer, waiting up to
     * {@link #BUFFER_OFFER_TIMEOUT_MILLIS} when the buffer is full.
     *
     * @param persistenceItem datapoint to buffer
     * @return true if the item was accepted, false if the buffer is
     *         uninitialized, stayed full, or the thread was interrupted
     */
    protected boolean addToBuffer(T persistenceItem) {
        // Snapshot the nullable field once to avoid a check-then-act race
        // should the buffer be reset concurrently.
        BlockingQueue<T> localBuffer = buffer;
        if (localBuffer == null) {
            return false;
        }
        try {
            return localBuffer.offer(persistenceItem, BUFFER_OFFER_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.warn("Interrupted when trying to buffer data! Dropping data");
            return false;
        }
    }
}

View File

@@ -0,0 +1,216 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.HashMap;
import java.util.Map;
import org.openhab.core.items.Item;
import org.openhab.core.library.items.CallItem;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.PlayerItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.HSBType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.library.types.PlayPauseType;
import org.openhab.core.library.types.PointType;
import org.openhab.core.library.types.RewindFastforwardType;
import org.openhab.core.library.types.StringListType;
import org.openhab.core.library.types.StringType;
import org.openhab.core.library.types.UpDownType;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for all DynamoDBItem. Represents openHAB Item serialized in a suitable format for the database.
 *
 * Provides the mapping between openHAB item/state types and the two DynamoDB
 * item flavors (string vs. big-decimal), plus conversion back to
 * {@link HistoricItem} for query results.
 *
 * @param <T> Type of the state as accepted by the AWS SDK.
 *
 * @author Sami Salonen - Initial contribution
 */
public abstract class AbstractDynamoDBItem<T> implements DynamoDBItem<T> {

    // Formatter used for serializing timestamps; always normalized to UTC.
    public static final DateTimeFormatter DATEFORMATTER = DateTimeFormatter.ofPattern(DATE_FORMAT)
            .withZone(ZoneId.of("UTC"));

    // Sentinel stored in place of a state when the item state was UNDEF.
    private static final String UNDEFINED_PLACEHOLDER = "<org.openhab.core.types.UnDefType.UNDEF>";

    // Maps each supported openHAB item class to the DynamoDB item flavor used to store it.
    private static final Map<Class<? extends Item>, Class<? extends DynamoDBItem<?>>> ITEM_CLASS_MAP = new HashMap<>();

    static {
        ITEM_CLASS_MAP.put(CallItem.class, DynamoDBStringItem.class);
        ITEM_CLASS_MAP.put(ContactItem.class, DynamoDBBigDecimalItem.class);
        ITEM_CLASS_MAP.put(DateTimeItem.class, DynamoDBStringItem.class);
        ITEM_CLASS_MAP.put(LocationItem.class, DynamoDBStringItem.class);
        ITEM_CLASS_MAP.put(NumberItem.class, DynamoDBBigDecimalItem.class);
        ITEM_CLASS_MAP.put(RollershutterItem.class, DynamoDBBigDecimalItem.class);
        ITEM_CLASS_MAP.put(StringItem.class, DynamoDBStringItem.class);
        ITEM_CLASS_MAP.put(SwitchItem.class, DynamoDBBigDecimalItem.class);
        ITEM_CLASS_MAP.put(DimmerItem.class, DynamoDBBigDecimalItem.class); // inherited from SwitchItem (!)
        ITEM_CLASS_MAP.put(ColorItem.class, DynamoDBStringItem.class); // inherited from DimmerItem
        ITEM_CLASS_MAP.put(PlayerItem.class, DynamoDBStringItem.class);
    }

    /**
     * Resolves the DynamoDB item flavor used to store the given openHAB item class.
     *
     * @param itemClass openHAB item class
     * @return DynamoDB item class used for storage
     * @throws IllegalArgumentException if the item class is not supported
     */
    public static final Class<DynamoDBItem<?>> getDynamoItemClass(Class<? extends Item> itemClass)
            throws IllegalArgumentException {
        @SuppressWarnings("unchecked")
        Class<DynamoDBItem<?>> dtoclass = (Class<DynamoDBItem<?>>) ITEM_CLASS_MAP.get(itemClass);
        if (dtoclass == null) {
            throw new IllegalArgumentException(String.format("Unknown item class %s", itemClass));
        }
        return dtoclass;
    }

    private final Logger logger = LoggerFactory.getLogger(AbstractDynamoDBItem.class);

    protected String name;
    protected T state;
    protected ZonedDateTime time;

    public AbstractDynamoDBItem(String name, T state, ZonedDateTime time) {
        this.name = name;
        this.state = state;
        this.time = time;
    }

    /**
     * Serializes an openHAB state into the matching DynamoDB item.
     *
     * Numeric-like states (DecimalType, OnOff, OpenClosed, UpDown) become
     * {@link DynamoDBBigDecimalItem}; everything else becomes
     * {@link DynamoDBStringItem}.
     *
     * @param name item name used as the hash key
     * @param state state to serialize
     * @param time timestamp used as the range key
     * @return serialized DynamoDB item
     */
    public static DynamoDBItem<?> fromState(String name, State state, ZonedDateTime time) {
        if (state instanceof DecimalType && !(state instanceof HSBType)) {
            // also covers PercentType which is inherited from DecimalType
            return new DynamoDBBigDecimalItem(name, ((DecimalType) state).toBigDecimal(), time);
        } else if (state instanceof OnOffType) {
            return new DynamoDBBigDecimalItem(name,
                    ((OnOffType) state) == OnOffType.ON ? BigDecimal.ONE : BigDecimal.ZERO, time);
        } else if (state instanceof OpenClosedType) {
            return new DynamoDBBigDecimalItem(name,
                    ((OpenClosedType) state) == OpenClosedType.OPEN ? BigDecimal.ONE : BigDecimal.ZERO, time);
        } else if (state instanceof UpDownType) {
            return new DynamoDBBigDecimalItem(name,
                    ((UpDownType) state) == UpDownType.UP ? BigDecimal.ONE : BigDecimal.ZERO, time);
        } else if (state instanceof DateTimeType) {
            return new DynamoDBStringItem(name, ((DateTimeType) state).getZonedDateTime().format(DATEFORMATTER), time);
        } else if (state instanceof UnDefType) {
            return new DynamoDBStringItem(name, UNDEFINED_PLACEHOLDER, time);
        } else if (state instanceof StringListType) {
            return new DynamoDBStringItem(name, state.toFullString(), time);
        } else {
            // HSBType, PointType, PlayPauseType and StringType
            return new DynamoDBStringItem(name, state.toFullString(), time);
        }
    }

    /**
     * Deserializes this DynamoDB item back to a {@link HistoricItem}, using the
     * given openHAB item to decide which concrete State type to reconstruct.
     *
     * @param item openHAB item that determines the target state type
     * @return historic item with the reconstructed state
     */
    @Override
    public HistoricItem asHistoricItem(final Item item) {
        final State[] state = new State[1];
        accept(new DynamoDBItemVisitor() {

            @Override
            public void visit(DynamoDBStringItem dynamoStringItem) {
                if (item instanceof ColorItem) {
                    state[0] = new HSBType(dynamoStringItem.getState());
                } else if (item instanceof LocationItem) {
                    state[0] = new PointType(dynamoStringItem.getState());
                } else if (item instanceof PlayerItem) {
                    String value = dynamoStringItem.getState();
                    try {
                        state[0] = PlayPauseType.valueOf(value);
                    } catch (IllegalArgumentException e) {
                        state[0] = RewindFastforwardType.valueOf(value);
                    }
                } else if (item instanceof DateTimeItem) {
                    try {
                        // Parse ZoneDateTime from string. DATEFORMATTER assumes UTC in case it is not clear
                        // from the string (should be).
                        // We convert to default/local timezone for user convenience (e.g. display)
                        state[0] = new DateTimeType(ZonedDateTime.parse(dynamoStringItem.getState(), DATEFORMATTER)
                                .withZoneSameInstant(ZoneId.systemDefault()));
                    } catch (DateTimeParseException e) {
                        logger.warn("Failed to parse {} as date. Outputting UNDEF instead",
                                dynamoStringItem.getState());
                        state[0] = UnDefType.UNDEF;
                    }
                } else if (dynamoStringItem.getState().equals(UNDEFINED_PLACEHOLDER)) {
                    state[0] = UnDefType.UNDEF;
                } else if (item instanceof CallItem) {
                    // Stored as "orig,dest"; reconstruct the two-part StringListType.
                    String parts = dynamoStringItem.getState();
                    String[] strings = parts.split(",");
                    String orig = strings[0];
                    String dest = strings[1];
                    state[0] = new StringListType(orig, dest);
                } else {
                    state[0] = new StringType(dynamoStringItem.getState());
                }
            }

            @Override
            public void visit(DynamoDBBigDecimalItem dynamoBigDecimalItem) {
                if (item instanceof NumberItem) {
                    state[0] = new DecimalType(dynamoBigDecimalItem.getState());
                } else if (item instanceof DimmerItem) {
                    state[0] = new PercentType(dynamoBigDecimalItem.getState());
                } else if (item instanceof SwitchItem) {
                    state[0] = dynamoBigDecimalItem.getState().compareTo(BigDecimal.ONE) == 0 ? OnOffType.ON
                            : OnOffType.OFF;
                } else if (item instanceof ContactItem) {
                    state[0] = dynamoBigDecimalItem.getState().compareTo(BigDecimal.ONE) == 0 ? OpenClosedType.OPEN
                            : OpenClosedType.CLOSED;
                } else if (item instanceof RollershutterItem) {
                    state[0] = new PercentType(dynamoBigDecimalItem.getState());
                } else {
                    logger.warn("Not sure how to convert big decimal item {} to type {}. Using StringType as fallback",
                            dynamoBigDecimalItem.getName(), item.getClass());
                    state[0] = new StringType(dynamoBigDecimalItem.getState().toString());
                }
            }
        });
        return new DynamoDBHistoricItem(getName(), state[0], getTime());
    }

    /**
     * We define all getter and setters in the child class implement those. Having the getter
     * and setter implementations here in the parent class does not work with introspection done by AWS SDK (1.11.56).
     */

    /*
     * (non-Javadoc)
     *
     * @see org.openhab.persistence.dynamodb.internal.DynamoItem#accept(org.openhab.persistence.dynamodb.internal.
     * DynamoItemVisitor)
     */
    @Override
    public abstract void accept(DynamoDBItemVisitor visitor);

    @Override
    public String toString() {
        return DateFormat.getDateTimeInstance().format(time) + ": " + name + " -> " + state.toString();
    }
}

View File

@@ -0,0 +1,95 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.math.BigDecimal;
import java.math.MathContext;
import java.time.ZonedDateTime;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBDocument;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBRangeKey;
/**
 * DynamoDBItem flavor for item states that are stored as a DynamoDB number.
 *
 * The state is rounded to the maximum precision DynamoDB supports before it
 * is handed to the AWS SDK for serialization.
 *
 * @author Sami Salonen - Initial contribution
 */
@DynamoDBDocument
public class DynamoDBBigDecimalItem extends AbstractDynamoDBItem<BigDecimal> {

    /**
     * We get the following error if the BigDecimal has too many digits
     * "Attempting to store more than 38 significant digits in a Number"
     *
     * See "Data types" section in
     * http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html
     */
    private static final int MAX_DIGITS_SUPPORTED_BY_AMAZON = 38;

    // Rounding context matching the DynamoDB precision limit above.
    private static final MathContext ROUNDING = new MathContext(MAX_DIGITS_SUPPORTED_BY_AMAZON);

    /** No-arg constructor required by the AWS SDK bean introspection. */
    public DynamoDBBigDecimalItem() {
        this(null, null, null);
    }

    public DynamoDBBigDecimalItem(String name, BigDecimal state, ZonedDateTime time) {
        super(name, state, time);
    }

    @DynamoDBAttribute(attributeName = DynamoDBItem.ATTRIBUTE_NAME_ITEMSTATE)
    @Override
    public BigDecimal getState() {
        // When serializing this to the wire, we round the number in order to ensure
        // that it is within the dynamodb limits
        return loseDigits(state);
    }

    @DynamoDBHashKey(attributeName = DynamoDBItem.ATTRIBUTE_NAME_ITEMNAME)
    @Override
    public String getName() {
        return name;
    }

    @Override
    @DynamoDBRangeKey(attributeName = ATTRIBUTE_NAME_TIMEUTC)
    public ZonedDateTime getTime() {
        return time;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    @Override
    public void setState(BigDecimal state) {
        this.state = state;
    }

    @Override
    public void setTime(ZonedDateTime time) {
        this.time = time;
    }

    @Override
    public void accept(DynamoDBItemVisitor visitor) {
        visitor.visit(this);
    }

    /**
     * Rounds the given number to at most {@link #MAX_DIGITS_SUPPORTED_BY_AMAZON}
     * significant digits; null passes through unchanged.
     */
    static BigDecimal loseDigits(BigDecimal number) {
        return (number == null) ? null : number.round(ROUNDING);
    }
}

View File

@@ -0,0 +1,66 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
/**
* Shallow wrapper for Dynamo DB wrappers
*
* @author Sami Salonen - Initial contribution
*/
public class DynamoDBClient {
    private final Logger logger = LoggerFactory.getLogger(DynamoDBClient.class);

    // High-level document API wrapper around the low-level client below.
    private DynamoDB dynamo;
    // Low-level AWS client; owns the underlying HTTP resources.
    private AmazonDynamoDB client;

    /**
     * Create a client for the given credentials and region.
     *
     * @param credentials AWS credentials to authenticate with
     * @param region AWS region hosting the DynamoDB tables
     */
    public DynamoDBClient(AWSCredentials credentials, Regions region) {
        client = AmazonDynamoDBClientBuilder.standard().withRegion(region)
                .withCredentials(new AWSStaticCredentialsProvider(credentials)).build();
        dynamo = new DynamoDB(client);
    }

    public DynamoDBClient(DynamoDBConfig clientConfig) {
        this(clientConfig.getCredentials(), clientConfig.getRegion());
    }

    public AmazonDynamoDB getDynamoClient() {
        return client;
    }

    public DynamoDB getDynamoDB() {
        return dynamo;
    }

    /**
     * Shut down the document API (which also releases the underlying client resources).
     */
    public void shutdown() {
        dynamo.shutdown();
    }

    /**
     * Verify connectivity by listing at most one table.
     *
     * @return true when the listTables call succeeds; false on any exception
     */
    public boolean checkConnection() {
        try {
            dynamo.listTables(1).firstPage();
        } catch (Exception e) {
            // Log the full exception, not just the message: the failure may be
            // network-, credential- or service-side, and the stack trace is needed
            // to tell these apart. The previous message claimed "internal server
            // error" which was inaccurate for most failure modes.
            logger.warn("Connection check failed when trying to list tables", e);
            return false;
        }
        return true;
    }
}

View File

@@ -0,0 +1,195 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.profile.ProfilesConfigFile;
import com.amazonaws.regions.Regions;
/**
* Configuration for DynamoDB connections
*
* @author Sami Salonen - Initial contribution
*/
@NonNullByDefault
public class DynamoDBConfig {
    public static final String DEFAULT_TABLE_PREFIX = "openhab-";
    public static final boolean DEFAULT_CREATE_TABLE_ON_DEMAND = true;
    public static final long DEFAULT_READ_CAPACITY_UNITS = 1;
    public static final long DEFAULT_WRITE_CAPACITY_UNITS = 1;
    public static final long DEFAULT_BUFFER_COMMIT_INTERVAL_MILLIS = 1000;
    public static final int DEFAULT_BUFFER_SIZE = 1000;

    private static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBConfig.class);

    private String tablePrefix = DEFAULT_TABLE_PREFIX;
    private Regions region;
    private AWSCredentials credentials;
    private boolean createTable = DEFAULT_CREATE_TABLE_ON_DEMAND;
    private long readCapacityUnits = DEFAULT_READ_CAPACITY_UNITS;
    private long writeCapacityUnits = DEFAULT_WRITE_CAPACITY_UNITS;
    private long bufferCommitIntervalMillis = DEFAULT_BUFFER_COMMIT_INTERVAL_MILLIS;
    private int bufferSize = DEFAULT_BUFFER_SIZE;

    /**
     * Parse the persistence service configuration map into a DynamoDBConfig.
     *
     * All values arrive as strings; blank or missing values fall back to the
     * DEFAULT_* constants. Parse failures (e.g. NumberFormatException) are caught
     * by the outer try and reported as configuration errors.
     *
     * @param config persistence service configuration
     * @return DynamoDB configuration. Returns null in case of configuration errors
     */
    public static @Nullable DynamoDBConfig fromConfig(Map<String, Object> config) {
        try {
            String regionName = (String) config.get("region");
            if (regionName == null) {
                return null;
            }
            final Regions region;
            try {
                region = Regions.fromName(regionName);
            } catch (IllegalArgumentException e) {
                LOGGER.error("Specify valid AWS region to use, got {}. Valid values include: {}", regionName,
                        Arrays.stream(Regions.values()).map(Regions::getName).collect(Collectors.joining(",")));
                return null;
            }

            AWSCredentials credentials = resolveCredentials(config);
            if (credentials == null) {
                // Error already logged in resolveCredentials
                return null;
            }

            String table = (String) config.get("tablePrefix");
            if (table == null || table.isBlank()) {
                LOGGER.debug("Using default table name {}", DEFAULT_TABLE_PREFIX);
                table = DEFAULT_TABLE_PREFIX;
            }

            boolean createTable = booleanParam(config, "createTable", DEFAULT_CREATE_TABLE_ON_DEMAND,
                    "Creating table on demand");
            long readCapacityUnits = longParam(config, "readCapacityUnits", DEFAULT_READ_CAPACITY_UNITS,
                    "Read capacity units");
            long writeCapacityUnits = longParam(config, "writeCapacityUnits", DEFAULT_WRITE_CAPACITY_UNITS,
                    "Write capacity units");
            long bufferCommitIntervalMillis = longParam(config, "bufferCommitIntervalMillis",
                    DEFAULT_BUFFER_COMMIT_INTERVAL_MILLIS, "Buffer commit interval millis");
            int bufferSize = intParam(config, "bufferSize", DEFAULT_BUFFER_SIZE, "Buffer size");

            return new DynamoDBConfig(region, credentials, table, createTable, readCapacityUnits, writeCapacityUnits,
                    bufferCommitIntervalMillis, bufferSize);
        } catch (Exception e) {
            LOGGER.error("Error with configuration", e);
            return null;
        }
    }

    /**
     * Resolve AWS credentials from explicit accessKey/secretKey, or alternatively
     * from a profiles config file plus profile name.
     *
     * @return credentials, or null when neither credential source is fully specified
     */
    private static @Nullable AWSCredentials resolveCredentials(Map<String, Object> config) {
        String accessKey = (String) config.get("accessKey");
        String secretKey = (String) config.get("secretKey");
        if (accessKey != null && !accessKey.isBlank() && secretKey != null && !secretKey.isBlank()) {
            LOGGER.debug("accessKey and secretKey specified. Using those.");
            return new BasicAWSCredentials(accessKey, secretKey);
        }
        LOGGER.debug("accessKey and/or secretKey blank. Checking profilesConfigFile and profile.");
        String profilesConfigFile = (String) config.get("profilesConfigFile");
        String profile = (String) config.get("profile");
        if (profilesConfigFile == null || profilesConfigFile.isBlank() || profile == null || profile.isBlank()) {
            LOGGER.error("Specify either 1) accessKey and secretKey; or 2) profilesConfigFile and "
                    + "profile for providing AWS credentials");
            return null;
        }
        return new ProfilesConfigFile(profilesConfigFile).getCredentials(profile);
    }

    // Read a boolean parameter, logging and returning the default on blank/missing value.
    private static boolean booleanParam(Map<String, Object> config, String key, boolean defaultValue,
            String logLabel) {
        String param = (String) config.get(key);
        if (param == null || param.isBlank()) {
            LOGGER.debug("{}: {}", logLabel, defaultValue);
            return defaultValue;
        }
        return Boolean.parseBoolean(param);
    }

    // Read a long parameter, logging and returning the default on blank/missing value.
    // Throws NumberFormatException on malformed input (handled by fromConfig).
    private static long longParam(Map<String, Object> config, String key, long defaultValue, String logLabel) {
        String param = (String) config.get(key);
        if (param == null || param.isBlank()) {
            LOGGER.debug("{}: {}", logLabel, defaultValue);
            return defaultValue;
        }
        return Long.parseLong(param);
    }

    // Read an int parameter, logging and returning the default on blank/missing value.
    // Throws NumberFormatException on malformed input (handled by fromConfig).
    private static int intParam(Map<String, Object> config, String key, int defaultValue, String logLabel) {
        String param = (String) config.get(key);
        if (param == null || param.isBlank()) {
            LOGGER.debug("{}: {}", logLabel, defaultValue);
            return defaultValue;
        }
        return Integer.parseInt(param);
    }

    public DynamoDBConfig(Regions region, AWSCredentials credentials, String table, boolean createTable,
            long readCapacityUnits, long writeCapacityUnits, long bufferCommitIntervalMillis, int bufferSize) {
        this.region = region;
        this.credentials = credentials;
        this.tablePrefix = table;
        this.createTable = createTable;
        this.readCapacityUnits = readCapacityUnits;
        this.writeCapacityUnits = writeCapacityUnits;
        this.bufferCommitIntervalMillis = bufferCommitIntervalMillis;
        this.bufferSize = bufferSize;
    }

    public AWSCredentials getCredentials() {
        return credentials;
    }

    public String getTablePrefix() {
        return tablePrefix;
    }

    public Regions getRegion() {
        return region;
    }

    public boolean isCreateTable() {
        return createTable;
    }

    public long getReadCapacityUnits() {
        return readCapacityUnits;
    }

    public long getWriteCapacityUnits() {
        return writeCapacityUnits;
    }

    public long getBufferCommitIntervalMillis() {
        return bufferCommitIntervalMillis;
    }

    public int getBufferSize() {
        return bufferSize;
    }
}

View File

@@ -0,0 +1,58 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.text.DateFormat;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
/**
* This is a Java bean used to return historic items from Dynamodb.
*
* @author Sami Salonen - Initial contribution
*/
@NonNullByDefault
public class DynamoDBHistoricItem implements HistoricItem {
    private final String name;
    private final State state;
    private final ZonedDateTime timestamp;

    /**
     * @param name item name
     * @param state historic state of the item
     * @param timestamp time the state was recorded
     */
    public DynamoDBHistoricItem(String name, State state, ZonedDateTime timestamp) {
        this.name = name;
        this.state = state;
        this.timestamp = timestamp;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public ZonedDateTime getTimestamp() {
        return timestamp;
    }

    @Override
    public State getState() {
        return state;
    }

    @Override
    public String toString() {
        // DateFormat.format(Object) only accepts java.util.Date or Number; passing the
        // ZonedDateTime directly would throw IllegalArgumentException at runtime, so
        // convert via Instant first.
        return DateFormat.getDateTimeInstance().format(java.util.Date.from(timestamp.toInstant())) + ": " + name
                + " -> " + state.toString();
    }
}

View File

@@ -0,0 +1,58 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.openhab.core.items.Item;
import org.openhab.core.persistence.HistoricItem;
/**
* Represents openHAB Item serialized in a suitable format for the database
*
* @param <T> Type of the state as accepted by the AWS SDK.
*
* @author Sami Salonen - Initial contribution
*/
public interface DynamoDBItem<T> {

    // Interface fields are implicitly public static final; redundant modifiers removed.

    /** Timestamp serialization format: UTC with millisecond precision. */
    String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";

    /** DynamoDB attribute holding the (UTC) timestamp; used as range key. */
    String ATTRIBUTE_NAME_TIMEUTC = "timeutc";

    /** DynamoDB attribute holding the item name; used as hash key. */
    String ATTRIBUTE_NAME_ITEMNAME = "itemname";

    /** DynamoDB attribute holding the serialized item state. */
    String ATTRIBUTE_NAME_ITEMSTATE = "itemstate";

    /**
     * Convert this AbstractDynamoItem as HistoricItem.
     *
     * @param item Item representing this item. Used to determine item type.
     * @return HistoricItem representing this DynamoDBItem.
     */
    HistoricItem asHistoricItem(Item item);

    /** @return name of the openHAB item this state belongs to */
    String getName();

    /** @return serialized state, in the type accepted by the AWS SDK */
    T getState();

    /** @return time of the state change */
    ZonedDateTime getTime();

    void setName(String name);

    void setState(T state);

    void setTime(ZonedDateTime time);

    /**
     * Visitor pattern dispatch over the concrete item types.
     *
     * @param visitor visitor to call back with this item's concrete type
     */
    void accept(DynamoDBItemVisitor visitor);
}

View File

@@ -0,0 +1,29 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import org.eclipse.jdt.annotation.NonNullByDefault;
/**
* Visitor for DynamoDBItem
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public interface DynamoDBItemVisitor {

    // Interface methods are implicitly public; redundant modifier removed.

    /** Called back when visiting a number-typed (BigDecimal) item. */
    void visit(DynamoDBBigDecimalItem dynamoBigDecimalItem);

    /** Called back when visiting a string-typed item. */
    void visit(DynamoDBStringItem dynamoStringItem);
}

View File

@@ -0,0 +1,569 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.common.NamedThreadFactory;
import org.openhab.core.config.core.ConfigurableService;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemNotFoundException;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.persistence.PersistenceItemInfo;
import org.openhab.core.persistence.PersistenceService;
import org.openhab.core.persistence.QueryablePersistenceService;
import org.openhab.core.persistence.strategy.PersistenceStrategy;
import org.openhab.core.types.State;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper.FailedBatch;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig.PaginationLoadingStrategy;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression;
import com.amazonaws.services.dynamodbv2.datamodeling.PaginatedQueryList;
import com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome;
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.GlobalSecondaryIndex;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException;
import com.amazonaws.services.dynamodbv2.model.TableDescription;
import com.amazonaws.services.dynamodbv2.model.TableStatus;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;
/**
* This is the implementation of the DynamoDB {@link PersistenceService}. It persists item values
* using the <a href="https://aws.amazon.com/dynamodb/">Amazon DynamoDB</a> database. The states (
* {@link State}) of an {@link Item} are persisted in DynamoDB tables.
*
* The service creates tables automatically, one for numbers, and one for strings.
*
* @see AbstractDynamoDBItem.fromState for details how different items are persisted
*
* @author Sami Salonen - Initial contribution
* @author Kai Kreuzer - Migration to 3.x
*
*/
@NonNullByDefault
@Component(service = { PersistenceService.class,
QueryablePersistenceService.class }, configurationPid = "org.openhab.dynamodb", //
property = Constants.SERVICE_PID + "=org.openhab.dynamodb")
@ConfigurableService(category = "persistence", label = "DynamoDB Persistence Service", description_uri = DynamoDBPersistenceService.CONFIG_URI)
public class DynamoDBPersistenceService extends AbstractBufferedPersistenceService<DynamoDBItem<?>>
implements QueryablePersistenceService {
protected static final String CONFIG_URI = "persistence:dynamodb";
private class ExponentialBackoffRetry implements Runnable {
private int retry;
private Map<String, List<WriteRequest>> unprocessedItems;
private @Nullable Exception lastException;
public ExponentialBackoffRetry(Map<String, List<WriteRequest>> unprocessedItems) {
this.unprocessedItems = unprocessedItems;
}
@Override
public void run() {
logger.debug("Error storing object to dynamo, unprocessed items: {}. Retrying with exponential back-off",
unprocessedItems);
lastException = null;
while (!unprocessedItems.isEmpty() && retry < WAIT_MILLIS_IN_RETRIES.length) {
if (!sleep()) {
// Interrupted
return;
}
retry++;
try {
BatchWriteItemOutcome outcome = DynamoDBPersistenceService.this.db.getDynamoDB()
.batchWriteItemUnprocessed(unprocessedItems);
unprocessedItems = outcome.getUnprocessedItems();
lastException = null;
} catch (AmazonServiceException e) {
if (e instanceof ResourceNotFoundException) {
logger.debug(
"DynamoDB query raised unexpected exception: {}. This might happen if table was recently created",
e.getMessage());
} else {
logger.debug("DynamoDB query raised unexpected exception: {}.", e.getMessage());
}
lastException = e;
continue;
}
}
if (unprocessedItems.isEmpty()) {
logger.debug("After {} retries successfully wrote all unprocessed items", retry);
} else {
logger.warn(
"Even after retries failed to write some items. Last exception: {} {}, unprocessed items: {}",
lastException == null ? "null" : lastException.getClass().getName(),
lastException == null ? "null" : lastException.getMessage(), unprocessedItems);
}
}
private boolean sleep() {
try {
long sleepTime;
if (retry == 1 && lastException != null && lastException instanceof ResourceNotFoundException) {
sleepTime = WAIT_ON_FIRST_RESOURCE_NOT_FOUND_MILLIS;
} else {
sleepTime = WAIT_MILLIS_IN_RETRIES[retry];
}
Thread.sleep(sleepTime);
return true;
} catch (InterruptedException e) {
logger.debug("Interrupted while writing data!");
return false;
}
}
public Map<String, List<WriteRequest>> getUnprocessedItems() {
return unprocessedItems;
}
}
private static final int WAIT_ON_FIRST_RESOURCE_NOT_FOUND_MILLIS = 5000;
private static final int[] WAIT_MILLIS_IN_RETRIES = new int[] { 100, 100, 200, 300, 500 };
private static final String DYNAMODB_THREADPOOL_NAME = "dynamodbPersistenceService";
private final ItemRegistry itemRegistry;
private @Nullable DynamoDBClient db;
private final Logger logger = LoggerFactory.getLogger(DynamoDBPersistenceService.class);
private boolean isProperlyConfigured;
private @NonNullByDefault({}) DynamoDBConfig dbConfig;
private @NonNullByDefault({}) DynamoDBTableNameResolver tableNameResolver;
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1,
new NamedThreadFactory(DYNAMODB_THREADPOOL_NAME));
private @Nullable ScheduledFuture<?> writeBufferedDataFuture;
@Activate
public DynamoDBPersistenceService(final @Reference ItemRegistry itemRegistry) {
this.itemRegistry = itemRegistry;
}
/**
* For testing. Allows access to underlying DynamoDBClient.
*
* @return DynamoDBClient connected to AWS Dyanamo DB.
*/
@Nullable
DynamoDBClient getDb() {
return db;
}
@Activate
public void activate(final @Nullable BundleContext bundleContext, final Map<String, Object> config) {
resetClient();
dbConfig = DynamoDBConfig.fromConfig(config);
if (dbConfig == null) {
// Configuration was invalid. Abort service activation.
// Error is already logger in fromConfig.
return;
}
tableNameResolver = new DynamoDBTableNameResolver(dbConfig.getTablePrefix());
try {
if (!ensureClient()) {
logger.error("Error creating dynamodb database client. Aborting service activation.");
return;
}
} catch (Exception e) {
logger.error("Error constructing dynamodb client", e);
return;
}
writeBufferedDataFuture = null;
resetWithBufferSize(dbConfig.getBufferSize());
long commitIntervalMillis = dbConfig.getBufferCommitIntervalMillis();
if (commitIntervalMillis > 0) {
writeBufferedDataFuture = scheduler.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
try {
DynamoDBPersistenceService.this.flushBufferedData();
} catch (RuntimeException e) {
// We want to catch all unexpected exceptions since all unhandled exceptions make
// ScheduledExecutorService halt the regular running of the task.
// It is better to print out the exception, and try again
// (on next cycle)
logger.warn(
"Execution of scheduled flushing of buffered data failed unexpectedly. Ignoring exception, trying again according to configured commit interval of {} ms.",
commitIntervalMillis, e);
}
}
}, 0, commitIntervalMillis, TimeUnit.MILLISECONDS);
}
isProperlyConfigured = true;
logger.debug("dynamodb persistence service activated");
}
@Deactivate
public void deactivate() {
logger.debug("dynamodb persistence service deactivated");
if (writeBufferedDataFuture != null) {
writeBufferedDataFuture.cancel(false);
writeBufferedDataFuture = null;
}
resetClient();
}
/**
* Initializes DynamoDBClient (db field)
*
* If DynamoDBClient constructor throws an exception, error is logged and false is returned.
*
* @return whether initialization was successful.
*/
private boolean ensureClient() {
if (db == null) {
try {
db = new DynamoDBClient(dbConfig);
} catch (Exception e) {
logger.error("Error constructing dynamodb client", e);
return false;
}
}
return true;
}
@Override
public DynamoDBItem<?> persistenceItemFromState(String name, State state, ZonedDateTime time) {
return AbstractDynamoDBItem.fromState(name, state, time);
}
/**
* Create table (if not present) and wait for table to become active.
*
* Synchronized in order to ensure that at most single thread is creating the table at a time
*
* @param mapper
* @param dtoClass
* @return whether table creation succeeded.
*/
private synchronized boolean createTable(DynamoDBMapper mapper, Class<?> dtoClass) {
if (db == null) {
return false;
}
String tableName;
try {
ProvisionedThroughput provisionedThroughput = new ProvisionedThroughput(dbConfig.getReadCapacityUnits(),
dbConfig.getWriteCapacityUnits());
CreateTableRequest request = mapper.generateCreateTableRequest(dtoClass);
request.setProvisionedThroughput(provisionedThroughput);
if (request.getGlobalSecondaryIndexes() != null) {
for (GlobalSecondaryIndex index : request.getGlobalSecondaryIndexes()) {
index.setProvisionedThroughput(provisionedThroughput);
}
}
tableName = request.getTableName();
try {
db.getDynamoClient().describeTable(tableName);
} catch (ResourceNotFoundException e) {
// No table present, continue with creation
db.getDynamoClient().createTable(request);
} catch (AmazonClientException e) {
logger.error("Table creation failed due to error in describeTable operation", e);
return false;
}
// table found or just created, wait
return waitForTableToBecomeActive(tableName);
} catch (AmazonClientException e) {
logger.error("Exception when creating table", e);
return false;
}
}
private boolean waitForTableToBecomeActive(String tableName) {
try {
logger.debug("Checking if table '{}' is created...", tableName);
final TableDescription tableDescription;
try {
tableDescription = db.getDynamoDB().getTable(tableName).waitForActive();
} catch (IllegalArgumentException e) {
logger.warn("Table '{}' is being deleted: {} {}", tableName, e.getClass().getSimpleName(),
e.getMessage());
return false;
} catch (ResourceNotFoundException e) {
logger.warn("Table '{}' was deleted unexpectedly: {} {}", tableName, e.getClass().getSimpleName(),
e.getMessage());
return false;
}
boolean success = TableStatus.ACTIVE.equals(TableStatus.fromValue(tableDescription.getTableStatus()));
if (success) {
logger.debug("Creation of table '{}' successful, table status is now {}", tableName,
tableDescription.getTableStatus());
} else {
logger.warn("Creation of table '{}' unsuccessful, table status is now {}", tableName,
tableDescription.getTableStatus());
}
return success;
} catch (AmazonClientException e) {
logger.error("Exception when checking table status (describe): {}", e.getMessage());
return false;
} catch (InterruptedException e) {
logger.error("Interrupted while trying to check table status: {}", e.getMessage());
return false;
}
}
private void resetClient() {
if (db == null) {
return;
}
db.shutdown();
db = null;
dbConfig = null;
tableNameResolver = null;
isProperlyConfigured = false;
}
private DynamoDBMapper getDBMapper(String tableName) {
try {
DynamoDBMapperConfig mapperConfig = new DynamoDBMapperConfig.Builder()
.withTableNameOverride(new DynamoDBMapperConfig.TableNameOverride(tableName))
.withPaginationLoadingStrategy(PaginationLoadingStrategy.LAZY_LOADING).build();
return new DynamoDBMapper(db.getDynamoClient(), mapperConfig);
} catch (AmazonClientException e) {
logger.error("Error getting db mapper: {}", e.getMessage());
throw e;
}
}
@Override
protected boolean isReadyToStore() {
return isProperlyConfigured && ensureClient();
}
@Override
public String getId() {
return "dynamodb";
}
@Override
public String getLabel(@Nullable Locale locale) {
return "DynamoDB";
}
@Override
public Set<PersistenceItemInfo> getItemInfo() {
return Collections.emptySet();
}
@Override
protected void flushBufferedData() {
if (buffer != null && buffer.isEmpty()) {
return;
}
logger.debug("Writing buffered data. Buffer size: {}", buffer.size());
for (;;) {
Map<String, Deque<DynamoDBItem<?>>> itemsByTable = readBuffer();
// Write batch of data, one table at a time
for (Entry<String, Deque<DynamoDBItem<?>>> entry : itemsByTable.entrySet()) {
String tableName = entry.getKey();
Deque<DynamoDBItem<?>> batch = entry.getValue();
if (!batch.isEmpty()) {
flushBatch(getDBMapper(tableName), batch);
}
}
if (buffer != null && buffer.isEmpty()) {
break;
}
}
}
private Map<String, Deque<DynamoDBItem<?>>> readBuffer() {
Map<String, Deque<DynamoDBItem<?>>> batchesByTable = new HashMap<>(2);
// Get batch of data
while (!buffer.isEmpty()) {
DynamoDBItem<?> dynamoItem = buffer.poll();
if (dynamoItem == null) {
break;
}
String tableName = tableNameResolver.fromItem(dynamoItem);
Deque<DynamoDBItem<?>> batch = batchesByTable.computeIfAbsent(tableName, new Function<>() {
@Override
public Deque<DynamoDBItem<?>> apply(String t) {
return new ArrayDeque<>();
}
});
batch.add(dynamoItem);
}
return batchesByTable;
}
/**
* Flush batch of data to DynamoDB
*
* @param mapper mapper associated with the batch
* @param batch batch of data to write to DynamoDB
*/
private void flushBatch(DynamoDBMapper mapper, Deque<DynamoDBItem<?>> batch) {
long currentTimeMillis = System.currentTimeMillis();
List<FailedBatch> failed = mapper.batchSave(batch);
for (FailedBatch failedBatch : failed) {
if (failedBatch.getException() instanceof ResourceNotFoundException) {
// Table did not exist. Try again after creating table
retryFlushAfterCreatingTable(mapper, batch, failedBatch);
} else {
logger.debug("Batch failed with {}. Retrying next with exponential back-off",
failedBatch.getException().getMessage());
new ExponentialBackoffRetry(failedBatch.getUnprocessedItems()).run();
}
}
if (failed.isEmpty()) {
logger.debug("flushBatch ended with {} items in {} ms: {}", batch.size(),
System.currentTimeMillis() - currentTimeMillis, batch);
} else {
logger.warn(
"flushBatch ended with {} items in {} ms: {}. There were some failed batches that were retried -- check logs for ERRORs to see if writes were successful",
batch.size(), System.currentTimeMillis() - currentTimeMillis, batch);
}
}
/**
* Retry flushing data after creating table associated with mapper
*
* @param mapper mapper associated with the batch
* @param batch original batch of data. Used for logging and to determine table name
* @param failedBatch failed batch that should be retried
*/
private void retryFlushAfterCreatingTable(DynamoDBMapper mapper, Deque<DynamoDBItem<?>> batch,
FailedBatch failedBatch) {
logger.debug("Table was not found. Trying to create table and try saving again");
if (createTable(mapper, batch.peek().getClass())) {
logger.debug("Table creation successful, trying to save again");
if (!failedBatch.getUnprocessedItems().isEmpty()) {
ExponentialBackoffRetry retry = new ExponentialBackoffRetry(failedBatch.getUnprocessedItems());
retry.run();
if (retry.getUnprocessedItems().isEmpty()) {
logger.debug("Successfully saved items after table creation");
}
}
} else {
logger.warn("Table creation failed. Not storing some parts of batch: {}. Unprocessed items: {}", batch,
failedBatch.getUnprocessedItems());
}
}
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
logger.debug("got a query");
if (!isProperlyConfigured) {
logger.debug("Configuration for dynamodb not yet loaded or broken. Not storing item.");
return Collections.emptyList();
}
if (!ensureClient()) {
logger.warn("DynamoDB not connected. Not storing item.");
return Collections.emptyList();
}
String itemName = filter.getItemName();
Item item = getItemFromRegistry(itemName);
if (item == null) {
logger.warn("Could not get item {} from registry!", itemName);
return Collections.emptyList();
}
Class<DynamoDBItem<?>> dtoClass = AbstractDynamoDBItem.getDynamoItemClass(item.getClass());
String tableName = tableNameResolver.fromClass(dtoClass);
DynamoDBMapper mapper = getDBMapper(tableName);
logger.debug("item {} (class {}) will be tried to query using dto class {} from table {}", itemName,
item.getClass(), dtoClass, tableName);
List<HistoricItem> historicItems = new ArrayList<>();
DynamoDBQueryExpression<DynamoDBItem<?>> queryExpression = DynamoDBQueryUtils.createQueryExpression(dtoClass,
filter);
@SuppressWarnings("rawtypes")
final PaginatedQueryList<? extends DynamoDBItem> paginatedList;
try {
paginatedList = mapper.query(dtoClass, queryExpression);
} catch (AmazonServiceException e) {
logger.error(
"DynamoDB query raised unexpected exception: {}. Returning empty collection. "
+ "Status code 400 (resource not found) might occur if table was just created.",
e.getMessage());
return Collections.emptyList();
}
for (int itemIndexOnPage = 0; itemIndexOnPage < filter.getPageSize(); itemIndexOnPage++) {
int itemIndex = filter.getPageNumber() * filter.getPageSize() + itemIndexOnPage;
DynamoDBItem<?> dynamoItem;
try {
dynamoItem = paginatedList.get(itemIndex);
} catch (IndexOutOfBoundsException e) {
logger.debug("Index {} is out-of-bounds", itemIndex);
break;
}
if (dynamoItem != null) {
HistoricItem historicItem = dynamoItem.asHistoricItem(item);
logger.trace("Dynamo item {} converted to historic item: {}", item, historicItem);
historicItems.add(historicItem);
}
}
return historicItems;
}
/**
 * Retrieves the item for the given name from the item registry
 *
 * @param itemName name of the item to look up
 * @return item with the given name, or null if no such item exists in item registry.
 */
private @Nullable Item getItemFromRegistry(String itemName) {
    Item item = null;
    try {
        if (itemRegistry != null) {
            item = itemRegistry.getItem(itemName);
        }
    } catch (ItemNotFoundException e) {
        // Pass the exception as the last argument so SLF4J logs the stack trace;
        // the original swallowed it, making lookup failures hard to diagnose.
        logger.error("Unable to get item {} from registry", itemName, e);
    }
    return item;
}
@Override
public List<PersistenceStrategy> getDefaultStrategies() {
    // By default restore item states on startup and persist on every state change
    return List.of(PersistenceStrategy.Globals.RESTORE, PersistenceStrategy.Globals.CHANGE);
}
}

View File

@@ -0,0 +1,148 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import java.util.Collections;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Operator;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
import com.amazonaws.services.dynamodbv2.model.Condition;
/**
 * Utility class for building DynamoDB query expressions from openHAB persistence filter criteria.
 *
 * @author Sami Salonen - Initial contribution
 */
@NonNullByDefault
public class DynamoDBQueryUtils {

    private DynamoDBQueryUtils() {
        // Static utility class, not meant to be instantiated
    }

    /**
     * Construct dynamodb query from filter
     *
     * @param dtoClass DTO class used to construct the hash key (item name)
     * @param filter filter describing item name, ordering, page size, optional time range and state constraint
     * @return DynamoDBQueryExpression corresponding to the given FilterCriteria
     */
    public static DynamoDBQueryExpression<DynamoDBItem<?>> createQueryExpression(
            Class<? extends DynamoDBItem<?>> dtoClass, FilterCriteria filter) {
        DynamoDBItem<?> item = getDynamoDBHashKey(dtoClass, filter.getItemName());
        final DynamoDBQueryExpression<DynamoDBItem<?>> queryExpression = new DynamoDBQueryExpression<DynamoDBItem<?>>()
                .withHashKeyValues(item).withScanIndexForward(filter.getOrdering() == Ordering.ASCENDING)
                .withLimit(filter.getPageSize());
        maybeAddTimeFilter(queryExpression, filter);
        maybeAddStateFilter(filter, queryExpression);
        return queryExpression;
    }

    /**
     * Instantiate the DTO class and set the item name so it can serve as the query hash key.
     */
    private static DynamoDBItem<?> getDynamoDBHashKey(Class<? extends DynamoDBItem<?>> dtoClass, String itemName) {
        final DynamoDBItem<?> item;
        try {
            // Class.newInstance() is deprecated (Java 9+) and propagates any checked exception
            // thrown by the constructor; invoke the no-argument constructor via reflection instead.
            item = dtoClass.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // Preserve the cause so instantiation failures remain debuggable
            throw new IllegalStateException("Could not instantiate DTO class " + dtoClass, e);
        }
        item.setName(itemName);
        return item;
    }

    /**
     * Add a filter expression on the item state when the filter specifies both operator and state.
     */
    private static void maybeAddStateFilter(FilterCriteria filter,
            final DynamoDBQueryExpression<DynamoDBItem<?>> queryExpression) {
        if (filter.getOperator() != null && filter.getState() != null) {
            // Convert filter's state to DynamoDBItem in order to get suitable string representation for the state
            final DynamoDBItem<?> filterState = AbstractDynamoDBItem.fromState(filter.getItemName(), filter.getState(),
                    ZonedDateTime.now());
            queryExpression.setFilterExpression(String.format("%s %s :opstate", DynamoDBItem.ATTRIBUTE_NAME_ITEMSTATE,
                    operatorAsString(filter.getOperator())));
            // The visitor picks the DynamoDB attribute type (S vs. N) matching the serialized state
            filterState.accept(new DynamoDBItemVisitor() {
                @Override
                public void visit(DynamoDBStringItem dynamoStringItem) {
                    queryExpression.setExpressionAttributeValues(Collections.singletonMap(":opstate",
                            new AttributeValue().withS(dynamoStringItem.getState())));
                }

                @Override
                public void visit(DynamoDBBigDecimalItem dynamoBigDecimalItem) {
                    queryExpression.setExpressionAttributeValues(Collections.singletonMap(":opstate",
                            new AttributeValue().withN(dynamoBigDecimalItem.getState().toPlainString())));
                }
            });
        }
    }

    /**
     * Add a range-key (timestamp) condition to the query when the filter specifies a time range.
     *
     * @return the applied time condition, or null when the filter has no begin/end date
     */
    private static @Nullable Condition maybeAddTimeFilter(
            final DynamoDBQueryExpression<DynamoDBItem<?>> queryExpression, final FilterCriteria filter) {
        final Condition timeCondition = constructTimeCondition(filter);
        if (timeCondition != null) {
            queryExpression.setRangeKeyConditions(
                    Collections.singletonMap(DynamoDBItem.ATTRIBUTE_NAME_TIMEUTC, timeCondition));
        }
        return timeCondition;
    }

    /**
     * Build a GE / LE / BETWEEN condition on the timestamp attribute depending on which
     * of begin/end dates are present in the filter.
     *
     * @return condition, or null when neither begin nor end date is set
     */
    private static @Nullable Condition constructTimeCondition(FilterCriteria filter) {
        boolean hasBegin = filter.getBeginDate() != null;
        boolean hasEnd = filter.getEndDate() != null;
        final Condition timeCondition;
        if (!hasBegin && !hasEnd) {
            timeCondition = null;
        } else if (hasBegin && !hasEnd) {
            timeCondition = new Condition().withComparisonOperator(ComparisonOperator.GE).withAttributeValueList(
                    new AttributeValue().withS(filter.getBeginDate().format(AbstractDynamoDBItem.DATEFORMATTER)));
        } else if (!hasBegin && hasEnd) {
            timeCondition = new Condition().withComparisonOperator(ComparisonOperator.LE).withAttributeValueList(
                    new AttributeValue().withS(filter.getEndDate().format(AbstractDynamoDBItem.DATEFORMATTER)));
        } else {
            timeCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList(
                    new AttributeValue().withS(filter.getBeginDate().format(AbstractDynamoDBItem.DATEFORMATTER)),
                    new AttributeValue().withS(filter.getEndDate().format(AbstractDynamoDBItem.DATEFORMATTER)));
        }
        return timeCondition;
    }

    /**
     * Convert op to string suitable for dynamodb filter expression
     *
     * @param op the filter operator
     * @return string representation corresponding to the given the Operator
     * @throws IllegalStateException for operators with no DynamoDB equivalent
     */
    private static String operatorAsString(Operator op) {
        switch (op) {
            case EQ:
                return "=";
            case NEQ:
                return "<>";
            case LT:
                return "<";
            case LTE:
                return "<=";
            case GT:
                return ">";
            case GTE:
                return ">=";
            default:
                throw new IllegalStateException("Unknown operator " + op);
        }
    }
}

View File

@@ -0,0 +1,75 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBDocument;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBRangeKey;
/**
* DynamoDBItem for items that can be serialized as DynamoDB string
*
* @author Sami Salonen - Initial contribution
*/
@DynamoDBDocument
public class DynamoDBStringItem extends AbstractDynamoDBItem<String> {

    /** No-argument constructor required by the DynamoDB object mapper. */
    public DynamoDBStringItem() {
        this(null, null, null);
    }

    public DynamoDBStringItem(String name, String state, ZonedDateTime time) {
        super(name, state, time);
    }

    /** State is serialized as a DynamoDB string attribute. */
    @DynamoDBAttribute(attributeName = DynamoDBItem.ATTRIBUTE_NAME_ITEMSTATE)
    @Override
    public String getState() {
        return state;
    }

    /** Item name is the table hash key. */
    @DynamoDBHashKey(attributeName = DynamoDBItem.ATTRIBUTE_NAME_ITEMNAME)
    @Override
    public String getName() {
        return name;
    }

    /** Timestamp is the table range key. */
    @Override
    @DynamoDBRangeKey(attributeName = ATTRIBUTE_NAME_TIMEUTC)
    public ZonedDateTime getTime() {
        return time;
    }

    @Override
    public void accept(DynamoDBItemVisitor visitor) {
        // Visitor lives in the same package; the fully-qualified name was redundant
        visitor.visit(this);
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    @Override
    public void setState(String state) {
        this.state = state;
    }

    @Override
    public void setTime(ZonedDateTime time) {
        this.time = time;
    }
}

View File

@@ -0,0 +1,65 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
/**
* The DynamoDBTableNameResolver resolves DynamoDB table name for a given item.
*
* @author Sami Salonen - Initial contribution
*
*/
public class DynamoDBTableNameResolver {

    /** Prefix prepended to every generated table name (e.g. "openhab-"). */
    private final String tablePrefix;

    public DynamoDBTableNameResolver(String tablePrefix) {
        this.tablePrefix = tablePrefix;
    }

    /**
     * Resolve the table name for the given DynamoDB item.
     *
     * @param item item whose concrete type determines the table suffix
     * @return table name, prefix + "bigdecimal" or prefix + "string"
     */
    public String fromItem(DynamoDBItem<?> item) {
        final String[] tableName = new String[1];
        // Use the visitor pattern to deduce the table name
        item.accept(new DynamoDBItemVisitor() {
            @Override
            public void visit(DynamoDBBigDecimalItem dynamoBigDecimalItem) {
                tableName[0] = tablePrefix + "bigdecimal";
            }

            @Override
            public void visit(DynamoDBStringItem dynamoStringItem) {
                tableName[0] = tablePrefix + "string";
            }
        });
        return tableName[0];
    }

    /**
     * Resolve the table name corresponding to the given DynamoDBItem class
     *
     * @param clazz DynamoDBItem class; must have an accessible no-argument constructor
     * @return table name for items of the given class
     * @throws IllegalStateException if the class cannot be instantiated
     */
    public String fromClass(Class<? extends DynamoDBItem<?>> clazz) {
        final DynamoDBItem<?> dummy;
        try {
            // Construct new instance of this class (assuming presence of a no-argument constructor)
            // in order to re-use fromItem(DynamoDBItem)
            dummy = clazz.getConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // Narrowed from catch(Exception) and the cause is preserved: the original
            // discarded the underlying exception, hiding why instantiation failed.
            throw new IllegalStateException(String.format("Could not find suitable constructor for class %s", clazz),
                    e);
        }
        return this.fromItem(dummy);
    }
}

View File

@@ -0,0 +1,118 @@
<?xml version="1.0" encoding="UTF-8"?>
<config-description:config-descriptions
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:config-description="https://openhab.org/schemas/config-description/v1.0.0"
xsi:schemaLocation="https://openhab.org/schemas/config-description/v1.0.0
https://openhab.org/schemas/config-description-1.0.0.xsd">
<config-description uri="persistence:dynamodb">
<!--
############################ Amazon DynamoDB Persistence Service ##################################
#
# The following parameters are used to configure Amazon DynamoDB Persistence.
#
# Further details at https://docs.openhab.org/addons/persistence/dynamodb/readme.html
#
#
# CONNECTION SETTINGS (follow OPTION 1 or OPTION 2)
#
# OPTION 1 (using accessKey and secretKey)
#accessKey=AKIAIOSFODNN7EXAMPLE
#secretKey=3+AAAAABBBbbbCCCCCCdddddd+7mnbIOLH
#region=eu-west-1
# OPTION 2 (using profilesConfigFile and profile)
# where profilesConfigFile points to AWS credentials file
#profilesConfigFile=/etc/openhab2/aws_creds
#profile=fooprofile
#region=eu-west-1
# Credentials file example:
#
# [fooprofile]
# aws_access_key_id=AKIAIOSFODNN7EXAMPLE
# aws_secret_access_key=3+AAAAABBBbbbCCCCCCdddddd+7mnbIOLH
#
# ADVANCED CONFIGURATION (OPTIONAL)
#
# read capacity for the created tables
#readCapacityUnits=1
# write capacity for the created tables
#writeCapacityUnits=1
# table prefix used in the name of created tables
#tablePrefix=openhab-
-->
<parameter name="region" type="text" required="true">
<label>AWS region ID</label>
<description><![CDATA[AWS region ID as described in Step 2 in Setting up Amazon account.<br />
The region needs to match the region of the AWS user that will access Amazon DynamoDB.<br />
For example, eu-west-1.]]></description>
</parameter>
<parameter name="accessKey" type="text" required="false">
<label>AWS access key</label>
<description><![CDATA[AWS access key of the AWS user that will access Amazon DynamoDB.
<br />
Give either 1) access key and secret key, or 2) credentials file and profile name.
]]></description>
</parameter>
<parameter name="secretKey" type="text" required="false">
<label>AWS secret key</label>
<description><![CDATA[AWS secret key of the AWS user that will access Amazon DynamoDB.
<br />
Give either 1) access key and secret key, or 2) credentials file and profile name.
]]></description>
</parameter>
<parameter name="profilesConfigFile" type="text" required="false">
<label>AWS credentials file</label>
<description><![CDATA[Path to the AWS credentials file. <br />
For example, /etc/openhab2/aws_creds.
Please note that the user that runs openHAB must have appropriate read rights to the credential file.
<br />
Give either 1) access key and secret key, or 2) credentials file and profile name.
]]></description>
</parameter>
<parameter name="profile" type="text" required="false">
<label>Profile name</label>
<description><![CDATA[Name of the profile to use in AWS credentials file
<br />
Give either 1) access key and secret key, or 2) credentials file and profile name.
]]></description>
</parameter>
<parameter name="readCapacityUnits" type="integer" required="false" min="1">
<description>Read capacity for the created tables. Default is 1.</description>
<label>Read capacity</label>
<advanced>true</advanced>
</parameter>
<parameter name="writeCapacityUnits" type="integer" required="false" min="1">
<label>Write capacity</label>
<description>Write capacity for the created tables. Default is 1.</description>
<advanced>true</advanced>
</parameter>
<parameter name="tablePrefix" type="text" required="false">
<label>Table prefix</label>
<description>Table prefix used in the name of created tables. Default is openhab-</description>
<advanced>true</advanced>
</parameter>
</config-description>
</config-description:config-descriptions>

View File

@@ -0,0 +1,95 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.Test;
import org.openhab.core.library.items.CallItem;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.PlayerItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
/**
* Test for AbstractDynamoDBItem.getDynamoItemClass
*
* @author Sami Salonen - Initial contribution
*/
@NonNullByDefault
public class AbstractDynamoDBItemGetDynamoItemClassTest {

    // Each case asserts which DynamoDB DTO class AbstractDynamoDBItem.getDynamoItemClass
    // selects for a given openHAB item type: DynamoDBStringItem for string-serialized
    // states, DynamoDBBigDecimalItem for numerically-serialized states.

    // Call state (phone numbers) is string-serialized
    @Test
    public void testCallItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(CallItem.class));
    }

    // Contact state (OPEN/CLOSED) is number-serialized
    @Test
    public void testContactItem() throws IOException {
        assertEquals(DynamoDBBigDecimalItem.class, AbstractDynamoDBItem.getDynamoItemClass(ContactItem.class));
    }

    // Date-time state is string-serialized
    @Test
    public void testDateTimeItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(DateTimeItem.class));
    }

    @Test
    public void testStringItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(StringItem.class));
    }

    // Location (lat,lon,alt) is string-serialized
    @Test
    public void testLocationItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(LocationItem.class));
    }

    @Test
    public void testNumberItem() throws IOException {
        assertEquals(DynamoDBBigDecimalItem.class, AbstractDynamoDBItem.getDynamoItemClass(NumberItem.class));
    }

    // Color (HSB triple) is string-serialized
    @Test
    public void testColorItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(ColorItem.class));
    }

    // Dimmer percentage is number-serialized
    @Test
    public void testDimmerItem() throws IOException {
        assertEquals(DynamoDBBigDecimalItem.class, AbstractDynamoDBItem.getDynamoItemClass(DimmerItem.class));
    }

    // Player state is string-serialized
    @Test
    public void testPlayerItem() throws IOException {
        assertEquals(DynamoDBStringItem.class, AbstractDynamoDBItem.getDynamoItemClass(PlayerItem.class));
    }

    // Rollershutter percentage is number-serialized
    @Test
    public void testRollershutterItem() throws IOException {
        assertEquals(DynamoDBBigDecimalItem.class, AbstractDynamoDBItem.getDynamoItemClass(RollershutterItem.class));
    }

    // Switch state (ON/OFF) is number-serialized
    @Test
    public void testOnOffTypeWithSwitchItem() throws IOException {
        assertEquals(DynamoDBBigDecimalItem.class, AbstractDynamoDBItem.getDynamoItemClass(SwitchItem.class));
    }
}

View File

@@ -0,0 +1,261 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.*;
import java.io.IOException;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.TimeZone;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.Test;
import org.openhab.core.items.Item;
import org.openhab.core.library.items.CallItem;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.HSBType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.library.types.PointType;
import org.openhab.core.library.types.StringListType;
import org.openhab.core.library.types.StringType;
import org.openhab.core.library.types.UpDownType;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
import org.openhab.core.types.UnDefType;
/**
* Test for AbstractDynamoDBItem.fromState and AbstractDynamoDBItem.asHistoricItem for all kind of states
*
* @author Sami Salonen - Initial contribution
*/
@NonNullByDefault
public class AbstractDynamoDBItemSerializationTest {

    /** Fixed timestamp used for every serialization round trip in this test. */
    private final ZonedDateTime date = ZonedDateTime.ofInstant(Instant.ofEpochSecond(400), ZoneId.systemDefault());

    /**
     * Generic function testing serialization of item state to internal format in DB. In other words, conversion of
     * Item with state to DynamoDBItem
     *
     * @param state item state
     * @param expectedState internal format in DB representing the item state
     * @return dynamo db item
     * @throws IOException
     */
    public DynamoDBItem<?> testStateGeneric(State state, Object expectedState) throws IOException {
        DynamoDBItem<?> dbItem = AbstractDynamoDBItem.fromState("item1", state, date);
        assertEquals("item1", dbItem.getName());
        assertEquals(date, dbItem.getTime());
        Object actualState = dbItem.getState();
        if (expectedState instanceof BigDecimal) {
            // Numeric states are stored with reduced precision; apply the same rounding before comparing
            BigDecimal expectedRounded = DynamoDBBigDecimalItem.loseDigits(((BigDecimal) expectedState));
            assertEquals(0, expectedRounded.compareTo((BigDecimal) actualState),
                    String.format("Expected state %s (%s but with some digits lost) did not match actual state %s",
                            expectedRounded, expectedState, actualState));
        } else {
            assertEquals(expectedState, actualState);
        }
        return dbItem;
    }

    /**
     * Test state deserialization, that is DynamoDBItem conversion to HistoricItem
     *
     * @param dbItem dynamo db item
     * @param item parameter for DynamoDBItem.asHistoricItem
     * @param expectedState Expected state of the historic item. DecimalTypes are compared with reduced accuracy
     * @return the historic item produced from the dynamo db item
     * @throws IOException
     */
    public HistoricItem testAsHistoricGeneric(DynamoDBItem<?> dbItem, Item item, Object expectedState)
            throws IOException {
        HistoricItem historicItem = dbItem.asHistoricItem(item);
        assertEquals("item1", historicItem.getName());
        assertEquals(date, historicItem.getTimestamp());
        assertEquals(expectedState.getClass(), historicItem.getState().getClass());
        if (expectedState instanceof DecimalType) {
            // serialization loses accuracy, take this into consideration
            BigDecimal expectedRounded = DynamoDBBigDecimalItem
                    .loseDigits(((DecimalType) expectedState).toBigDecimal());
            BigDecimal actual = ((DecimalType) historicItem.getState()).toBigDecimal();
            assertEquals(0, expectedRounded.compareTo(actual),
                    String.format("Expected state %s (%s but with some digits lost) did not match actual state %s",
                            expectedRounded, expectedState, actual));
        } else {
            assertEquals(expectedState, historicItem.getState());
        }
        return historicItem;
    }

    @Test
    public void testUndefWithNumberItem() throws IOException {
        final DynamoDBItem<?> dbitem = testStateGeneric(UnDefType.UNDEF, "<org.openhab.core.types.UnDefType.UNDEF>");
        // UNDEF is serialized as a marker string even for number items
        assertTrue(dbitem instanceof DynamoDBStringItem);
        testAsHistoricGeneric(dbitem, new NumberItem("foo"), UnDefType.UNDEF);
    }

    @Test
    public void testCallTypeWithCallItem() throws IOException {
        final DynamoDBItem<?> dbitem = testStateGeneric(new StringListType("origNum", "destNum"), "origNum,destNum");
        testAsHistoricGeneric(dbitem, new CallItem("foo"), new StringListType("origNum", "destNum"));
    }

    @Test
    public void testOpenClosedTypeWithContactItem() throws IOException {
        final DynamoDBItem<?> dbitemOpen = testStateGeneric(OpenClosedType.CLOSED, BigDecimal.ZERO);
        testAsHistoricGeneric(dbitemOpen, new ContactItem("foo"), OpenClosedType.CLOSED);

        final DynamoDBItem<?> dbitemClosed = testStateGeneric(OpenClosedType.OPEN, BigDecimal.ONE);
        testAsHistoricGeneric(dbitemClosed, new ContactItem("foo"), OpenClosedType.OPEN);
    }

    @Test
    public void testDateTimeTypeWithDateTimeItem() throws IOException {
        ZonedDateTime zdt = ZonedDateTime.parse("2016-05-01T13:46:00.050Z");
        DynamoDBItem<?> dbitem = testStateGeneric(new DateTimeType(zdt.toString()), "2016-05-01T13:46:00.050Z");
        testAsHistoricGeneric(dbitem, new DateTimeItem("foo"),
                new DateTimeType(zdt.withZoneSameInstant(ZoneId.systemDefault())));
    }

    @Test
    public void testDateTimeTypeWithStringItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new DateTimeType(ZonedDateTime.parse("2016-05-01T13:46:00.050Z")),
                "2016-05-01T13:46:00.050Z");
        testAsHistoricGeneric(dbitem, new StringItem("foo"), new StringType("2016-05-01T13:46:00.050Z"));
    }

    @Test
    public void testDateTimeTypeLocalWithDateTimeItem() throws IOException {
        // Zoned input is stored normalized to UTC
        DynamoDBItem<?> dbitem = testStateGeneric(new DateTimeType("2016-07-17T19:38:07.050+0300"),
                "2016-07-17T16:38:07.050Z");
        ZonedDateTime expectedZdt = Instant.ofEpochMilli(1468773487050L).atZone(ZoneId.systemDefault());
        testAsHistoricGeneric(dbitem, new DateTimeItem("foo"), new DateTimeType(expectedZdt));
    }

    @Test
    public void testDateTimeTypeLocalWithStringItem() throws IOException {
        Instant instant = Instant.ofEpochMilli(1468773487050L); // GMT: Sun, 17 Jul 2016 16:38:07.050 GMT
        ZonedDateTime zdt = instant.atZone(TimeZone.getTimeZone("GMT+03:00").toZoneId());
        DynamoDBItem<?> dbitem = testStateGeneric(new DateTimeType(zdt), "2016-07-17T16:38:07.050Z");
        testAsHistoricGeneric(dbitem, new StringItem("foo"), new StringType("2016-07-17T16:38:07.050Z"));
    }

    @Test
    public void testPointTypeWithLocationItem() throws IOException {
        final PointType point = new PointType(new DecimalType(60.3), new DecimalType(30.2), new DecimalType(510.90));
        // Location is serialized as "lat,lon,alt"
        String expected = point.getLatitude().toBigDecimal().toString() + ","
                + point.getLongitude().toBigDecimal().toString() + "," + point.getAltitude().toBigDecimal().toString();
        DynamoDBItem<?> dbitem = testStateGeneric(point, expected);
        testAsHistoricGeneric(dbitem, new LocationItem("foo"), point);
    }

    @Test
    public void testDecimalTypeWithNumberItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new DecimalType("3.2"), new BigDecimal("3.2"));
        testAsHistoricGeneric(dbitem, new NumberItem("foo"), new DecimalType("3.2"));
    }

    @Test
    public void testPercentTypeWithColorItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
        testAsHistoricGeneric(dbitem, new ColorItem("foo"), new PercentType(new BigDecimal("3.2")));
    }

    @Test
    public void testPercentTypeWithDimmerItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
        testAsHistoricGeneric(dbitem, new DimmerItem("foo"), new PercentType(new BigDecimal("3.2")));
    }

    @Test
    public void testPercentTypeWithRollerShutterItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
        testAsHistoricGeneric(dbitem, new RollershutterItem("foo"), new PercentType(new BigDecimal("3.2")));
    }

    @Test
    public void testPercentTypeWithNumberItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new PercentType(new BigDecimal("3.2")), new BigDecimal("3.2"));
        // note: comes back as DecimalType instead of the original PercentType
        testAsHistoricGeneric(dbitem, new NumberItem("foo"), new DecimalType(new BigDecimal("3.2")));
    }

    @Test
    public void testUpDownTypeWithRollershutterItem() throws IOException {
        // note: comes back as PercentType instead of the original UpDownType
        DynamoDBItem<?> dbItemDown = testStateGeneric(UpDownType.DOWN, BigDecimal.ZERO);
        testAsHistoricGeneric(dbItemDown, new RollershutterItem("foo"), new PercentType(BigDecimal.ZERO));

        DynamoDBItem<?> dbItemUp = testStateGeneric(UpDownType.UP, BigDecimal.ONE);
        testAsHistoricGeneric(dbItemUp, new RollershutterItem("foo"), new PercentType(BigDecimal.ONE));
    }

    @Test
    public void testStringTypeWithStringItem() throws IOException {
        DynamoDBItem<?> dbitem = testStateGeneric(new StringType("foo bar"), "foo bar");
        testAsHistoricGeneric(dbitem, new StringItem("foo"), new StringType("foo bar"));
    }

    @Test
    public void testOnOffTypeWithColorItem() throws IOException {
        DynamoDBItem<?> dbitemOff = testStateGeneric(OnOffType.OFF, BigDecimal.ZERO);
        testAsHistoricGeneric(dbitemOff, new ColorItem("foo"), new PercentType(BigDecimal.ZERO));

        DynamoDBItem<?> dbitemOn = testStateGeneric(OnOffType.ON, BigDecimal.ONE);
        testAsHistoricGeneric(dbitemOn, new ColorItem("foo"), new PercentType(BigDecimal.ONE));
    }

    @Test
    public void testOnOffTypeWithDimmerItem() throws IOException {
        DynamoDBItem<?> dbitemOff = testStateGeneric(OnOffType.OFF, BigDecimal.ZERO);
        testAsHistoricGeneric(dbitemOff, new DimmerItem("foo"), new PercentType(BigDecimal.ZERO));

        DynamoDBItem<?> dbitemOn = testStateGeneric(OnOffType.ON, BigDecimal.ONE);
        testAsHistoricGeneric(dbitemOn, new DimmerItem("foo"), new PercentType(BigDecimal.ONE));
    }

    @Test
    public void testOnOffTypeWithSwitchItem() throws IOException {
        DynamoDBItem<?> dbitemOff = testStateGeneric(OnOffType.OFF, BigDecimal.ZERO);
        testAsHistoricGeneric(dbitemOff, new SwitchItem("foo"), OnOffType.OFF);

        DynamoDBItem<?> dbitemOn = testStateGeneric(OnOffType.ON, BigDecimal.ONE);
        testAsHistoricGeneric(dbitemOn, new SwitchItem("foo"), OnOffType.ON);
    }

    @Test
    public void testHSBTypeWithColorItem() throws IOException {
        // Use the String constructor: new BigDecimal(double) is error-prone in general
        // (e.g. new BigDecimal(0.1) is inexact). 2.5 and 3.5 are exactly representable
        // in binary, so this change does not alter the expected values.
        HSBType hsb = new HSBType(new DecimalType(1.5), new PercentType(new BigDecimal("2.5")),
                new PercentType(new BigDecimal("3.5")));
        DynamoDBItem<?> dbitem = testStateGeneric(hsb, "1.5,2.5,3.5");
        testAsHistoricGeneric(dbitem, new ColorItem("foo"), hsb);
    }
}

View File

@@ -0,0 +1,355 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import java.time.ZonedDateTime;
import java.util.Iterator;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Operator;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import org.openhab.core.persistence.HistoricItem;
import org.openhab.core.types.State;
/**
* This is abstract class helping with integration testing the persistence service. Different kind of queries are tested
* against actual dynamo db database.
*
*
* Inheritor of this base class needs to store two states of one item in a static method annotated with @BeforeClass.
* This
* static
* class should update the private static fields
* beforeStore (date before storing anything), afterStore1 (after storing first item, but before storing second item),
* afterStore2 (after storing second item). The item name must correspond to getItemName. The first state needs to be
* smaller than the second state.
*
* To have more comprehensive tests, the inheritor class can define getQueryItemStateBetween to provide a value between
* the two states. Null can be used to omit the additional tests.
*
*
* See DimmerItemIntegrationTest for example how to use this base class.
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public abstract class AbstractTwoItemIntegrationTest extends BaseIntegrationTest {
protected static @Nullable ZonedDateTime beforeStore;
protected static @Nullable ZonedDateTime afterStore1;
protected static @Nullable ZonedDateTime afterStore2;
protected abstract String getItemName();
/**
* State of the time item stored first, should be smaller than the second value
*
* @return
*/
protected abstract State getFirstItemState();
/**
* State of the time item stored second, should be larger than the first value
*
* @return
*/
protected abstract State getSecondItemState();
/**
* State that is between the first and second. Use null to omit extended tests using this value.
*
* @return
*/
protected abstract @Nullable State getQueryItemStateBetween();
/**
 * Compares an expected and actual state for equality. Non-final and protected, so a
 * subclass can override it — e.g. to compare numeric states with a tolerance.
 */
protected void assertStateEquals(State expected, State actual) {
    assertEquals(expected, actual);
}
@BeforeAll
public static void checkService() throws InterruptedException {
    // Skip the whole test class when no DynamoDB service was configured for testing.
    final String skipReason = "DynamoDB integration tests will be skipped. Did you specify AWS credentials for testing? "
            + "See BaseIntegrationTest for more details";
    final boolean serviceAvailable = service != null;
    if (!serviceAvailable) {
        System.out.println(skipReason);
    }
    assumeTrue(serviceAvailable, skipReason);
}
/**
 * Asserts that iterable contains correct items and nothing else
 *
 * @param iterable query result to inspect
 * @param ascending whether the result is expected to be in ascending time order
 */
private void assertIterableContainsItems(Iterable<HistoricItem> iterable, boolean ascending) {
    Iterator<HistoricItem> iterator = iterable.iterator();
    // Exactly two entries are expected: the two states stored by the subclass setup
    HistoricItem actual1 = iterator.next();
    HistoricItem actual2 = iterator.next();
    assertFalse(iterator.hasNext());

    // Both entries must belong to the item under test
    for (HistoricItem actual : new HistoricItem[] { actual1, actual2 }) {
        assertEquals(getItemName(), actual.getName());
    }

    // Map result order back to storage order depending on requested ordering
    HistoricItem storedFirst;
    HistoricItem storedSecond;
    if (ascending) {
        storedFirst = actual1;
        storedSecond = actual2;
    } else {
        storedFirst = actual2;
        storedSecond = actual1;
    }

    // Each timestamp must fall inside the window recorded around the corresponding store call.
    // NOTE(review): beforeStore/afterStore1/afterStore2 are @Nullable statics set by subclass
    // @BeforeClass setup — an NPE here means that setup did not run; confirm intended.
    assertStateEquals(getFirstItemState(), storedFirst.getState());
    assertTrue(storedFirst.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(storedFirst.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));

    assertStateEquals(getSecondItemState(), storedSecond.getState());
    assertTrue(storedSecond.getTimestamp().toInstant().isBefore(afterStore2.toInstant()));
    assertTrue(storedSecond.getTimestamp().toInstant().isAfter(afterStore1.toInstant()));
}
@Test
public void testQueryUsingName() {
    // Query by item name only, ascending: both stored states should come back in store order.
    final FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setOrdering(Ordering.ASCENDING);
    assertIterableContainsItems(service.query(filter), true);
}
@Test
public void testQueryUsingNameAndStart() {
    // A begin date earlier than anything stored must not exclude either state.
    final FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setOrdering(Ordering.ASCENDING);
    assertIterableContainsItems(service.query(filter), true);
}
/** Begin date after both stores: no results expected. */
@Test
public void testQueryUsingNameAndStartNoMatch() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(afterStore2);
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    assertFalse(it.hasNext());
}
/** Name + end date after both stores: both items expected, ascending. */
@Test
public void testQueryUsingNameAndEnd() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setEndDate(afterStore2);
    filter.setOrdering(Ordering.ASCENDING);
    assertIterableContainsItems(BaseIntegrationTest.service.query(filter), true);
}
/** End date before both stores: no results expected. */
@Test
public void testQueryUsingNameAndEndNoMatch() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setEndDate(beforeStore);
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    assertFalse(it.hasNext());
}
/** Full window [beforeStore, afterStore2]: both items expected, ascending. */
@Test
public void testQueryUsingNameAndStartAndEnd() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOrdering(Ordering.ASCENDING);
    assertIterableContainsItems(BaseIntegrationTest.service.query(filter), true);
}
/** Same full window but descending: both items expected in reverse order. */
@Test
public void testQueryUsingNameAndStartAndEndDesc() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOrdering(Ordering.DESCENDING);
    assertIterableContainsItems(BaseIntegrationTest.service.query(filter), false);
}
/** NEQ the second state: only the first item should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithNEQOperator() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.NEQ);
    filter.setState(getSecondItemState());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getFirstItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));
}
/** EQ the first state: only the first item should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithEQOperator() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.EQ);
    filter.setState(getFirstItemState());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getFirstItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));
}
/** LT the second state: only the first (smaller) item should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithLTOperator() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.LT);
    filter.setState(getSecondItemState());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getFirstItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));
}
/** LT the first (smallest) state: nothing is strictly smaller, so no results. */
@Test
public void testQueryUsingNameAndStartAndEndWithLTOperatorNoMatch() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.LT);
    filter.setState(getFirstItemState());
    assertFalse(BaseIntegrationTest.service.query(filter).iterator().hasNext());
}
/** LTE the first state: only the first item (equal case) should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithLTEOperator() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.LTE);
    filter.setState(getFirstItemState());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getFirstItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));
}
/** GT the in-between state: only the second (larger) item should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithGTOperator() {
    // Skip for subclasses which have null "state between"
    assumeTrue(getQueryItemStateBetween() != null);
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.GT);
    filter.setState(getQueryItemStateBetween());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getSecondItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore2.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(afterStore1.toInstant()));
}
/** GT the second (largest) state: nothing is strictly larger, so no results. */
@Test
public void testQueryUsingNameAndStartAndEndWithGTOperatorNoMatch() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.GT);
    filter.setState(getSecondItemState());
    assertFalse(BaseIntegrationTest.service.query(filter).iterator().hasNext());
}
/** GTE the second state: only the second item (equal case) should match. */
@Test
public void testQueryUsingNameAndStartAndEndWithGTEOperator() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore2);
    filter.setOperator(Operator.GTE);
    filter.setState(getSecondItemState());
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getSecondItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore2.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(afterStore1.toInstant()));
}
/** Window ending at afterStore1: only the first item falls inside it. */
@Test
public void testQueryUsingNameAndStartAndEndFirst() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(afterStore1);
    filter.setOrdering(Ordering.ASCENDING);
    Iterator<HistoricItem> it = BaseIntegrationTest.service.query(filter).iterator();
    HistoricItem only = it.next();
    assertFalse(it.hasNext());
    assertStateEquals(getFirstItemState(), only.getState());
    assertTrue(only.getTimestamp().toInstant().isBefore(afterStore1.toInstant()));
    assertTrue(only.getTimestamp().toInstant().isAfter(beforeStore.toInstant()));
}
/** Degenerate window (begin == end == beforeStore): no results expected. */
@Test
public void testQueryUsingNameAndStartAndEndNoMatch() {
    FilterCriteria filter = new FilterCriteria();
    filter.setItemName(getItemName());
    filter.setBeginDate(beforeStore);
    filter.setEndDate(beforeStore); // sic
    assertFalse(BaseIntegrationTest.service.query(filter).iterator().hasNext());
}
}

View File

@@ -0,0 +1,213 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Stream;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.common.registry.RegistryChangeListener;
import org.openhab.core.items.Item;
import org.openhab.core.items.ItemNotFoundException;
import org.openhab.core.items.ItemNotUniqueException;
import org.openhab.core.items.ItemRegistry;
import org.openhab.core.items.RegistryHook;
import org.openhab.core.library.items.CallItem;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.PlayerItem;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
/**
 * Base class for DynamoDB integration tests. Initializes the persistence service against
 * real AWS using credentials supplied via the DYNAMODBTEST_REGION / DYNAMODBTEST_ACCESS /
 * DYNAMODBTEST_SECRET system properties. When any of these is missing, {@link #service}
 * stays null and the tests are skipped (see checkService in the abstract test base).
 */
@NonNullByDefault
public class BaseIntegrationTest {
    // Logs under the service's category so test output lines up with service logging
    protected static final Logger LOGGER = LoggerFactory.getLogger(DynamoDBPersistenceService.class);
    /** Service under test; null when integration-test credentials are not configured. */
    protected static @Nullable DynamoDBPersistenceService service;
    /** Items resolvable through the stubbed ItemRegistry, keyed by item name. */
    protected static final Map<String, Item> ITEMS = new HashMap<>();

    static {
        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "trace");
    }

    @BeforeAll
    public static void initService() throws InterruptedException {
        ITEMS.put("dimmer", new DimmerItem("dimmer"));
        ITEMS.put("number", new NumberItem("number"));
        ITEMS.put("string", new StringItem("string"));
        ITEMS.put("switch", new SwitchItem("switch"));
        ITEMS.put("contact", new ContactItem("contact"));
        ITEMS.put("color", new ColorItem("color"));
        ITEMS.put("rollershutter", new RollershutterItem("rollershutter"));
        ITEMS.put("datetime", new DateTimeItem("datetime"));
        ITEMS.put("call", new CallItem("call"));
        ITEMS.put("location", new LocationItem("location"));
        ITEMS.put("player_playpause", new PlayerItem("player_playpause"));
        ITEMS.put("player_rewindfastforward", new PlayerItem("player_rewindfastforward"));
        // Minimal ItemRegistry stub: only getItem(String) is exercised by the service under test
        DynamoDBPersistenceService newService = new DynamoDBPersistenceService(new ItemRegistry() {
            @Override
            public Collection<Item> getItems(String pattern) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Collection<Item> getItems() {
                throw new UnsupportedOperationException();
            }

            @Override
            public Item getItemByPattern(String name) throws ItemNotFoundException, ItemNotUniqueException {
                throw new UnsupportedOperationException();
            }

            @Override
            public Item getItem(String name) throws ItemNotFoundException {
                Item item = ITEMS.get(name);
                if (item == null) {
                    throw new ItemNotFoundException(name);
                }
                return item;
            }

            @Override
            public void addRegistryChangeListener(RegistryChangeListener<Item> listener) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Collection<Item> getAll() {
                throw new UnsupportedOperationException();
            }

            @Override
            public Stream<Item> stream() {
                throw new UnsupportedOperationException();
            }

            @Override
            public @Nullable Item get(String key) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void removeRegistryChangeListener(RegistryChangeListener<Item> listener) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Item add(Item element) {
                throw new UnsupportedOperationException();
            }

            @Override
            public @Nullable Item update(Item element) {
                throw new UnsupportedOperationException();
            }

            @Override
            public @Nullable Item remove(String key) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Collection<Item> getItemsOfType(String type) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Collection<Item> getItemsByTag(String... tags) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Collection<Item> getItemsByTagAndType(String type, String... tags) {
                throw new UnsupportedOperationException();
            }

            @Override
            public <T extends Item> Collection<T> getItemsByTag(Class<T> typeFilter, String... tags) {
                throw new UnsupportedOperationException();
            }

            @Override
            public @Nullable Item remove(String itemName, boolean recursive) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void addRegistryHook(RegistryHook<Item> hook) {
                throw new UnsupportedOperationException();
            }

            @Override
            public void removeRegistryHook(RegistryHook<Item> hook) {
                throw new UnsupportedOperationException();
            }
        });

        Map<String, Object> config = new HashMap<>();
        config.put("region", System.getProperty("DYNAMODBTEST_REGION"));
        config.put("accessKey", System.getProperty("DYNAMODBTEST_ACCESS"));
        config.put("secretKey", System.getProperty("DYNAMODBTEST_SECRET"));
        config.put("tablePrefix", "dynamodb-integration-tests-");
        // Disable buffering
        config.put("bufferSize", "0");

        for (Entry<String, Object> entry : config.entrySet()) {
            if (entry.getValue() == null) {
                // Parameterized logging instead of String.format (avoids formatting when disabled)
                LOGGER.warn("Expecting {} to have value for integration tests. Integration tests will be skipped",
                        entry.getKey());
                service = null;
                return;
            }
        }
        // Activate through the local reference, then publish; avoids dereferencing the
        // @Nullable static field
        newService.activate(null, config);
        service = newService;
        clearData();
    }

    /** Deletes the integration-test tables (when present) so each run starts clean. */
    protected static void clearData() {
        DynamoDBPersistenceService localService = service;
        if (localService == null) {
            // Service was never initialized -- nothing to clear
            return;
        }
        for (String table : new String[] { "dynamodb-integration-tests-bigdecimal",
                "dynamodb-integration-tests-string" }) {
            try {
                localService.getDb().getDynamoClient().deleteTable(table);
                localService.getDb().getDynamoDB().getTable(table).waitForDelete();
            } catch (ResourceNotFoundException ignored) {
                // Table did not exist -- nothing to delete
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of swallowing it
                Thread.currentThread().interrupt();
                LOGGER.warn("Interrupted! Table might not have been deleted");
            }
        }
    }
}

View File

@@ -0,0 +1,83 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.CallItem;
import org.openhab.core.library.types.StringListType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class CallItemIntegrationTest extends AbstractTwoItemIntegrationTest {
    private static final String NAME = "call";
    // values are encoded as part1,part2 - ordering goes wrt strings
    private static final StringListType STATE1 = new StringListType("part1", "foo");
    private static final StringListType STATE2 = new StringListType("part3", "bar");
    // Lexicographically between STATE1 and STATE2; enables the comparison-operator tests
    private static final StringListType STATE_BETWEEN = new StringListType("part2", "zzz");

    /**
     * Stores the two test states, with sleeps so beforeStore < store1 < afterStore1 <
     * store2 < afterStore2 holds strictly for the timestamp assertions.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        CallItem item = (CallItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): logs afterStore1 as the upper bound although the second store
        // completes by afterStore2 -- confirm whether afterStore2 was intended here
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    /** First (older) stored state. */
    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    /** Second (newer) stored state. */
    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    /** State strictly between the stored states, for LT/GT/... query tests. */
    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }

    @Override
    protected void assertStateEquals(State expected, State actual) {
        // Since CallType.equals is broken, toString is used as workaround
        assertEquals(expected.toString(), actual.toString());
    }
}

View File

@@ -0,0 +1,88 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.ColorItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.HSBType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class ColorItemIntegrationTest extends AbstractTwoItemIntegrationTest {
    /** Builds an HSB color from a numeric hue and integer saturation/brightness percents. */
    private static HSBType color(double hue, int saturation, int brightness) {
        return new HSBType(new DecimalType(hue), new PercentType(saturation), new PercentType(brightness));
    }

    /** Builds an HSB color from a decimal-string hue (full precision via BigDecimal). */
    private static HSBType color(String hue, int saturation, int brightness) {
        return new HSBType(new DecimalType(new BigDecimal(hue)), new PercentType(saturation),
                new PercentType(brightness));
    }

    private static final String NAME = "color";
    // values are encoded as <hue>,<saturation>,<brightness>, ordering goes wrt strings
    // STATE1's long hue exercises high-precision decimal handling in the persistence layer
    private static final HSBType STATE1 = color("3.1493842988948932984298384892384823984923849238492839483294893", 50,
            50);
    private static final HSBType STATE2 = color(75, 100, 90);
    private static final HSBType STATE_BETWEEN = color(60, 50, 50);

    /**
     * Stores the two test states, with sleeps so beforeStore < store1 < afterStore1 <
     * store2 < afterStore2 holds strictly for the timestamp assertions.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        ColorItem item = (ColorItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): logs afterStore1 as the upper bound although the second store
        // completes by afterStore2 -- confirm whether afterStore2 was intended here
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    /** First (older) stored state. */
    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    /** Second (newer) stored state. */
    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    /** State strictly between the stored states, for LT/GT/... query tests. */
    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,75 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.ContactItem;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.OpenClosedType;
import org.openhab.core.types.State;
/**
* @author Sami Salonen - Initial contribution
*/
@NonNullByDefault
public class ContactItemIntegrationTest extends AbstractTwoItemIntegrationTest {
    private static final String NAME = "contact";
    private static final OpenClosedType STATE1 = OpenClosedType.CLOSED;
    private static final OpenClosedType STATE2 = OpenClosedType.OPEN;
    // There is no OpenClosedType state value between CLOSED and OPEN.
    // Omit extended query tests AbstractTwoItemIntegrationTest by setting stateBetween to null.
    private static final @Nullable OnOffType STATE_BETWEEN = null;

    /**
     * Stores the two test states, with sleeps so beforeStore < store1 < afterStore1 <
     * store2 < afterStore2 holds strictly for the timestamp assertions.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        ContactItem item = (ContactItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): logs afterStore1 as the upper bound although the second store
        // completes by afterStore2 -- confirm whether afterStore2 was intended here
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    /** First (older) stored state. */
    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    /** Second (newer) stored state. */
    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    /** Always null: comparison-operator tests are skipped for this item type. */
    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,80 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.DateTimeItem;
import org.openhab.core.library.types.DateTimeType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class DateTimeItemIntegrationTest extends AbstractTwoItemIntegrationTest {
    private static final String NAME = "datetime";
    // Fixed instants with ZDT1 < ZDT_BETWEEN < ZDT2 so the comparison-operator tests apply
    private static final ZonedDateTime ZDT1 = ZonedDateTime.parse("2016-06-15T10:00:00Z");
    private static final ZonedDateTime ZDT2 = ZonedDateTime.parse("2016-06-15T16:00:00.123Z");
    private static final ZonedDateTime ZDT_BETWEEN = ZonedDateTime.parse("2016-06-15T14:00:00Z");
    private static final DateTimeType STATE1 = new DateTimeType(ZDT1);
    private static final DateTimeType STATE2 = new DateTimeType(ZDT2);
    private static final DateTimeType STATE_BETWEEN = new DateTimeType(ZDT_BETWEEN);

    /**
     * Stores the two test states, with sleeps so beforeStore < store1 < afterStore1 <
     * store2 < afterStore2 holds strictly for the timestamp assertions.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        DateTimeItem item = (DateTimeItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): logs afterStore1 as the upper bound although the second store
        // completes by afterStore2 -- confirm whether afterStore2 was intended here
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    /** First (older) stored state. */
    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    /** Second (newer) stored state. */
    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    /** State strictly between the stored states, for LT/GT/... query tests. */
    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,76 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.DimmerItem;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class DimmerItemIntegrationTest extends AbstractTwoItemIntegrationTest {
    private static final String NAME = "dimmer";
    // Percent values with 66 < 67 < 68, enabling the comparison-operator tests
    private static final PercentType STATE1 = new PercentType(66);
    private static final PercentType STATE2 = new PercentType(68);
    private static final PercentType STATE_BETWEEN = new PercentType(67);

    /**
     * Stores the two test states, with sleeps so beforeStore < store1 < afterStore1 <
     * store2 < afterStore2 holds strictly for the timestamp assertions.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        DimmerItem item = (DimmerItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): logs afterStore1 as the upper bound although the second store
        // completes by afterStore2 -- confirm whether afterStore2 was intended here
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    /** First (older) stored state. */
    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    /** Second (newer) stored state. */
    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    /** State strictly between the stored states, for LT/GT/... query tests. */
    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,221 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.*;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class DynamoDBConfigTest {
/**
 * Builds an unmodifiable configuration map from alternating key/value arguments.
 *
 * @param args alternating keys and values; length must be even
 * @return unmodifiable map containing exactly the given key/value pairs
 */
private static Map<String, Object> mapFrom(String... args) {
    assert args.length % 2 == 0;
    Map<String, String> config = new HashMap<>();
    // Step in pairs: args[i] is a key, args[i + 1] its value. The previous i++ loop also
    // inserted bogus (value, nextKey) entries, which the config parser happened to ignore.
    for (int i = 0; i + 1 < args.length; i += 2) {
        config.put(args[i], args[i + 1]);
    }
    return Collections.unmodifiableMap(config);
}
// JUnit 5 temporary directory, injected fresh per test; holds generated AWS profile files
public @TempDir @NonNullByDefault({}) File folder;
/** An empty configuration map must yield no configuration (null). */
@Test
public void testEmpty() throws Exception {
    Map<String, Object> emptyConfig = new HashMap<>();
    assertNull(DynamoDBConfig.fromConfig(emptyConfig));
}
/** An unrecognized region name must be rejected (null configuration). */
@Test
public void testInvalidRegion() throws Exception {
    Map<String, Object> config = Collections.singletonMap("region", "foobie");
    assertNull(DynamoDBConfig.fromConfig(config));
}
/** A region without any credentials must be rejected (null configuration). */
@Test
public void testRegionOnly() throws Exception {
    Map<String, Object> config = Collections.singletonMap("region", "eu-west-1");
    assertNull(DynamoDBConfig.fromConfig(config));
}
/** Region + static access keys: accepted, all other settings at their defaults. */
@Test
public void testRegionWithAccessKeys() throws Exception {
    DynamoDBConfig config = DynamoDBConfig
            .fromConfig(mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    // Defaults below
    assertEquals("openhab-", config.getTablePrefix());
    assertTrue(config.isCreateTable());
    assertEquals(1, config.getReadCapacityUnits());
    assertEquals(1, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
/**
 * Region + a valid AWS profiles file and profile name: configuration is accepted and
 * all other settings fall back to their defaults.
 */
@Test
public void testRegionWithProfilesConfigFile() throws Exception {
    Path credsFile = Files.createFile(Paths.get(folder.getPath(), "creds"));
    // Write with an explicit charset; String.getBytes() uses the platform default,
    // which is not guaranteed to be UTF-8 before Java 18
    Files.write(
            credsFile, ("[fooprofile]\n" + "aws_access_key_id=testAccessKey\n"
                    + "aws_secret_access_key=testSecretKey\n" + "aws_session_token=testSessionToken\n")
                            .getBytes(StandardCharsets.UTF_8),
            StandardOpenOption.TRUNCATE_EXISTING);
    DynamoDBConfig fromConfig = DynamoDBConfig.fromConfig(mapFrom("region", "eu-west-1", "profilesConfigFile",
            credsFile.toAbsolutePath().toString(), "profile", "fooprofile"));
    assertEquals(Regions.EU_WEST_1, fromConfig.getRegion());
    assertEquals("openhab-", fromConfig.getTablePrefix());
    assertEquals(true, fromConfig.isCreateTable());
    assertEquals(1, fromConfig.getReadCapacityUnits());
    assertEquals(1, fromConfig.getWriteCapacityUnits());
    assertEquals(1000L, fromConfig.getBufferCommitIntervalMillis());
    assertEquals(1000, fromConfig.getBufferSize());
}
/** mapFrom() with no arguments behaves like an empty configuration: rejected (null). */
@Test
public void testEmptyConfiguration() throws Exception {
    assertNull(DynamoDBConfig.fromConfig(mapFrom()));
}
/**
 * A malformed profiles file (invalid key name) must cause the configuration to be
 * rejected (null).
 */
@Test
public void testRegionWithInvalidProfilesConfigFile() throws Exception {
    Path credsFile = Files.createFile(Paths.get(folder.getPath(), "creds"));
    // Explicit charset: String.getBytes() is platform-dependent before Java 18
    Files.write(credsFile,
            ("[fooprofile]\n" + "aws_access_key_idINVALIDKEY=testAccessKey\n"
                    + "aws_secret_access_key=testSecretKey\n" + "aws_session_token=testSessionToken\n")
                            .getBytes(StandardCharsets.UTF_8),
            StandardOpenOption.TRUNCATE_EXISTING);
    // Use toAbsolutePath().toString() for consistency with the sibling profile tests
    assertNull(DynamoDBConfig.fromConfig(mapFrom("region", "eu-west-1", "profilesConfigFile",
            credsFile.toAbsolutePath().toString(), "profile", "fooprofile")));
}
/**
 * A valid profiles file but no "profile" key in the configuration must be rejected
 * (null).
 */
@Test
public void testRegionWithProfilesConfigFileMissingProfile() throws Exception {
    Path credsFile = Files.createFile(Paths.get(folder.getPath(), "creds"));
    // Explicit charset: String.getBytes() is platform-dependent before Java 18
    Files.write(
            credsFile, ("[fooprofile]\n" + "aws_access_key_id=testAccessKey\n"
                    + "aws_secret_access_key=testSecretKey\n" + "aws_session_token=testSessionToken\n")
                            .getBytes(StandardCharsets.UTF_8),
            StandardOpenOption.TRUNCATE_EXISTING);
    assertNull(DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "profilesConfigFile", credsFile.toAbsolutePath().toString())));
}
/** Custom tablePrefix overrides the "openhab-" default; everything else stays default. */
@Test
public void testRegionWithAccessKeysWithPrefix() throws Exception {
    DynamoDBConfig config = DynamoDBConfig.fromConfig(mapFrom("region", "eu-west-1", "accessKey", "access1",
            "secretKey", "secret1", "tablePrefix", "foobie-"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertEquals("foobie-", config.getTablePrefix());
    // Defaults below
    assertTrue(config.isCreateTable());
    assertEquals(1, config.getReadCapacityUnits());
    assertEquals(1, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
/** createTable=false overrides the default; everything else stays default. */
@Test
public void testRegionWithAccessKeysWithPrefixWithCreateTable() throws Exception {
    DynamoDBConfig config = DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1", "createTable", "false"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertFalse(config.isCreateTable());
    // Defaults below
    assertEquals("openhab-", config.getTablePrefix());
    assertEquals(1, config.getReadCapacityUnits());
    assertEquals(1, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
@Test
public void testRegionWithAccessKeysWithPrefixWithReadCapacityUnits() throws Exception {
    // Only the read capacity is overridden; write capacity keeps its default of 1.
    DynamoDBConfig config = DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1", "readCapacityUnits",
                    "5"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertEquals("openhab-", config.getTablePrefix());
    assertEquals(true, config.isCreateTable());
    assertEquals(5, config.getReadCapacityUnits());
    assertEquals(1, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
@Test
public void testRegionWithAccessKeysWithPrefixWithWriteCapacityUnits() throws Exception {
    // Only the write capacity is overridden; read capacity keeps its default of 1.
    DynamoDBConfig config = DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1", "writeCapacityUnits",
                    "5"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertEquals("openhab-", config.getTablePrefix());
    assertEquals(true, config.isCreateTable());
    assertEquals(1, config.getReadCapacityUnits());
    assertEquals(5, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
@Test
public void testRegionWithAccessKeysWithPrefixWithReadWriteCapacityUnits() throws Exception {
    // Both capacities are overridden independently.
    DynamoDBConfig config = DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1", "readCapacityUnits",
                    "3", "writeCapacityUnits", "5"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertEquals("openhab-", config.getTablePrefix());
    assertEquals(true, config.isCreateTable());
    assertEquals(3, config.getReadCapacityUnits());
    assertEquals(5, config.getWriteCapacityUnits());
    assertEquals(1000L, config.getBufferCommitIntervalMillis());
    assertEquals(1000, config.getBufferSize());
}
@Test
public void testRegionWithAccessKeysWithPrefixWithReadWriteCapacityUnitsWithBufferSettings() throws Exception {
    // Capacities plus buffer commit interval and buffer size are all overridden.
    DynamoDBConfig config = DynamoDBConfig.fromConfig(
            mapFrom("region", "eu-west-1", "accessKey", "access1", "secretKey", "secret1", "readCapacityUnits",
                    "3", "writeCapacityUnits", "5", "bufferCommitIntervalMillis", "501", "bufferSize", "112"));
    assertEquals(Regions.EU_WEST_1, config.getRegion());
    assertEquals("access1", config.getCredentials().getAWSAccessKeyId());
    assertEquals("secret1", config.getCredentials().getAWSSecretKey());
    assertEquals("openhab-", config.getTablePrefix());
    assertEquals(true, config.isCreateTable());
    assertEquals(3, config.getReadCapacityUnits());
    assertEquals(5, config.getWriteCapacityUnits());
    assertEquals(501L, config.getBufferCommitIntervalMillis());
    assertEquals(112, config.getBufferSize());
}
}

View File

@@ -0,0 +1,38 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.junit.jupiter.api.Test;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class DynamoDBTableNameResolverTest {

    /** Table name prefix shared by both resolver tests. */
    private static final String PREFIX = "prefix";

    @Test
    public void testWithDynamoDBBigDecimalItem() {
        // Resolved name is the prefix followed by the item-type-specific suffix.
        DynamoDBTableNameResolver resolver = new DynamoDBTableNameResolver(PREFIX);
        assertEquals("prefixbigdecimal", resolver.fromItem(new DynamoDBBigDecimalItem()));
    }

    @Test
    public void testWithDynamoDBStringItem() {
        DynamoDBTableNameResolver resolver = new DynamoDBTableNameResolver(PREFIX);
        assertEquals("prefixstring", resolver.fromItem(new DynamoDBStringItem()));
    }
}

View File

@@ -0,0 +1,78 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.LocationItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.PointType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class LocationItemIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "location";
    // values are encoded as lat,lon[,alt] , ordering goes wrt strings
    // STATE1 deliberately uses an extremely long latitude literal (precision limits).
    private static final PointType STATE1 = new PointType(
            new DecimalType("60.012033100120453345435345345345346365434630300230230032020393149"), new DecimalType(30.),
            new DecimalType(3.0));
    private static final PointType STATE2 = new PointType(new DecimalType(61.0), new DecimalType(30.));
    // String-wise between STATE1 ("60.0...") and STATE2 ("61.0...") per the note above.
    private static final PointType STATE_BETWEEN = new PointType(new DecimalType(60.5), new DecimalType(30.));

    /**
     * Stores two consecutive states of the location item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the two stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        LocationItem item = (LocationItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        // NOTE(review): the log mentions only the first store's window; the second
        // store happened between afterStore1 and afterStore2.
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,91 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class NumberItemIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "number";
    // On purpose we have super accurate number here (testing limits of aws)
    private static final DecimalType STATE1 = new DecimalType(new BigDecimal(
            "-32343243.193490838904389298049802398048923849032809483209483209482309840239840932840932849083094809483"));
    private static final DecimalType STATE2 = new DecimalType(600.9123);
    private static final DecimalType STATE_BETWEEN = new DecimalType(500);

    /**
     * Stores two consecutive states of the number item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        NumberItem item = (NumberItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }

    /**
     * Use relaxed state comparison due to numerical rounding. See also DynamoDBBigDecimalItem.loseDigits
     */
    @Override
    protected void assertStateEquals(State expected, State actual) {
        BigDecimal expectedDecimal = ((DecimalType) expected).toBigDecimal();
        BigDecimal actualDecimal = ((DecimalType) actual).toBigDecimal();
        BigDecimal expectedRounded = DynamoDBBigDecimalItem.loseDigits(expectedDecimal);
        // Include both values in the failure message: a bare assertTrue on the
        // comparison result reports nothing useful when the values differ.
        assertTrue(expectedRounded.compareTo(actualDecimal) == 0,
                () -> "Expected " + expectedRounded + " (rounded from " + expectedDecimal + ") but got "
                        + actualDecimal);
    }
}

View File

@@ -0,0 +1,165 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

import java.math.BigDecimal;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.persistence.FilterCriteria;
import org.openhab.core.persistence.FilterCriteria.Operator;
import org.openhab.core.persistence.FilterCriteria.Ordering;
import org.openhab.core.persistence.HistoricItem;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class PagingIntegrationTest extends BaseIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "number";

    /** Number of consecutive integer states (0..STATE_COUNT-1) stored for the queries. */
    private static final int STATE_COUNT = 10;

    /** Timestamp taken just before data population; used as the query begin date. */
    private static @Nullable ZonedDateTime storeStart;

    /**
     * Skips all tests if the DynamoDB service is not configured, otherwise
     * populates the test data once for the whole class.
     */
    @BeforeAll
    public static void checkService() throws InterruptedException {
        String msg = "DynamoDB integration tests will be skipped. Did you specify AWS credentials for testing? "
                + "See BaseIntegrationTest for more details";
        if (service == null) {
            System.out.println(msg);
        }
        assumeTrue(service != null, msg);
        populateData();
    }

    /** Stores STATE_COUNT consecutive integer states (0, 1, ..., 9) of the item. */
    public static void populateData() {
        storeStart = ZonedDateTime.now();
        NumberItem item = (NumberItem) ITEMS.get(NAME);
        for (int i = 0; i < STATE_COUNT; i++) {
            item.setState(new DecimalType(i));
            service.store(item);
        }
    }

    /**
     * Builds the paging query shared by all tests: all data of item NAME from
     * storeStart onwards, with the given ordering, page number and page size.
     */
    private static FilterCriteria pagingCriteria(Ordering ordering, int pageNumber, int pageSize) {
        FilterCriteria criteria = new FilterCriteria();
        criteria.setItemName(NAME);
        criteria.setBeginDate(storeStart);
        criteria.setOrdering(ordering);
        criteria.setPageNumber(pageNumber);
        criteria.setPageSize(pageSize);
        return criteria;
    }

    @Test
    public void testPagingFirstPage() {
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.ASCENDING, 0, 3)), 0, 1, 2);
    }

    @Test
    public void testPagingSecondPage() {
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.ASCENDING, 1, 3)), 3, 4, 5);
    }

    @Test
    public void testPagingPagePartialPage() {
        // Last page holds only one of the ten items.
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.ASCENDING, 3, 3)), 9);
    }

    @Test
    public void testPagingPageOutOfBounds() {
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.ASCENDING, 4, 3))); // no results
    }

    @Test
    public void testPagingPage0Descending() {
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.DESCENDING, 0, 3)), 9, 8, 7);
    }

    @Test
    public void testPagingPage0HugePageSize() {
        // Page size larger than the data set returns everything in one page.
        assertItemStates(BaseIntegrationTest.service.query(pagingCriteria(Ordering.ASCENDING, 0, 900)), 0, 1, 2, 3,
                4, 5, 6, 7, 8, 9);
    }

    @Test
    public void testPagingFirstPageWithFilter() {
        // State filter (> 3) is applied before paging, so page 0 starts at 4.
        FilterCriteria criteria = pagingCriteria(Ordering.ASCENDING, 0, 3);
        criteria.setOperator(Operator.GT);
        criteria.setState(new DecimalType(new BigDecimal(3)));
        assertItemStates(BaseIntegrationTest.service.query(criteria), 4, 5, 6);
    }

    /**
     * Asserts that the query result contains exactly the given states, in order.
     *
     * @param actualIterable query result
     * @param expected expected integer state values, in expected order
     */
    private void assertItemStates(Iterable<HistoricItem> actualIterable, int... expected) {
        Iterator<HistoricItem> actualIterator = actualIterable.iterator();
        List<HistoricItem> got = new ArrayList<>();
        for (int expectedState : expected) {
            assertTrue(actualIterator.hasNext());
            HistoricItem actual = actualIterator.next();
            assertEquals(new DecimalType(expectedState), actual.getState());
            got.add(actual);
        }
        if (actualIterator.hasNext()) {
            fail("Did not expect any more items, but got at least this extra element: "
                    + actualIterator.next().toString() + ". Before this we got: " + Arrays.toString(got.toArray()));
        }
    }
}

View File

@@ -0,0 +1,76 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.PlayerItem;
import org.openhab.core.library.types.PlayPauseType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class PlayerItemPlayPauseIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "player_playpause";
    private static final PlayPauseType STATE1 = PlayPauseType.PAUSE;
    private static final PlayPauseType STATE2 = PlayPauseType.PLAY;
    // null disables the extended between-queries in AbstractTwoItemIntegrationTest:
    // there is no PlayPauseType value between PAUSE and PLAY.
    private static final @Nullable PlayPauseType STATE_BETWEEN = null;

    /**
     * Stores two consecutive states of the player item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        PlayerItem item = (PlayerItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,77 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.PlayerItem;
import org.openhab.core.library.types.PlayPauseType;
import org.openhab.core.library.types.RewindFastforwardType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class PlayerItemRewindFastForwardIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "player_rewindfastforward";
    private static final RewindFastforwardType STATE1 = RewindFastforwardType.FASTFORWARD;
    private static final RewindFastforwardType STATE2 = RewindFastforwardType.REWIND;
    // null disables the extended between-queries in AbstractTwoItemIntegrationTest.
    // Declared as RewindFastforwardType (was PlayPauseType — copy-paste from the
    // PlayPause test) to match the state type under test.
    private static final @Nullable RewindFastforwardType STATE_BETWEEN = null;

    /**
     * Stores two consecutive states of the player item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        PlayerItem item = (PlayerItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,88 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.RollershutterItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class RollershutterItemIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "rollershutter";
    private static final PercentType STATE1 = PercentType.ZERO;
    // Deliberately over-precise percentage (tests numeric precision limits).
    private static final PercentType STATE2 = new PercentType("72.938289428989489389329834898929892439842399483498");
    private static final PercentType STATE_BETWEEN = new PercentType(66); // no such that exists

    /**
     * Stores two consecutive states of the rollershutter item, capturing
     * timestamps before/after each store. The sleeps keep the recorded
     * timestamps strictly apart so time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        RollershutterItem item = (RollershutterItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }

    /**
     * Use relaxed state comparison due to numerical rounding. See also DynamoDBBigDecimalItem.loseDigits
     */
    @Override
    protected void assertStateEquals(State expected, State actual) {
        BigDecimal expectedDecimal = ((DecimalType) expected).toBigDecimal();
        BigDecimal actualDecimal = ((DecimalType) actual).toBigDecimal();
        BigDecimal expectedRounded = DynamoDBBigDecimalItem.loseDigits(expectedDecimal);
        // Include both values in the failure message: a bare assertTrue on the
        // comparison result reports nothing useful when the values differ.
        assertTrue(expectedRounded.compareTo(actualDecimal) == 0,
                () -> "Expected " + expectedRounded + " (rounded from " + expectedDecimal + ") but got "
                        + actualDecimal);
    }
}

View File

@@ -0,0 +1,74 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.types.StringType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class StringItemIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "string";
    private static final StringType STATE1 = new StringType("b001");
    private static final StringType STATE2 = new StringType("c002");
    // NOTE(review): STATE_BETWEEN equals STATE1 ("b001") — presumably relies on the
    // between-query being inclusive of the lower bound; confirm against the base class.
    private static final StringType STATE_BETWEEN = new StringType("b001");

    /**
     * Stores two consecutive states of the string item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        StringItem item = (StringItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}

View File

@@ -0,0 +1,76 @@
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.persistence.dynamodb.internal;
import java.time.ZonedDateTime;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.junit.jupiter.api.BeforeAll;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.types.State;
/**
*
* @author Sami Salonen - Initial contribution
*
*/
@NonNullByDefault
public class SwitchItemIntegrationTest extends AbstractTwoItemIntegrationTest {

    // Item name; presumably registered in the base class' ITEMS map — confirm there.
    private static final String NAME = "switch";
    private static final OnOffType STATE1 = OnOffType.OFF;
    private static final OnOffType STATE2 = OnOffType.ON;
    // There is no OnOffType state value between OFF and ON.
    // Omit extended query tests AbstractTwoItemIntegrationTest by setting stateBetween to null.
    private static final @Nullable OnOffType STATE_BETWEEN = null;

    /**
     * Stores two consecutive states of the switch item, capturing timestamps
     * before/after each store. The sleeps keep the recorded timestamps strictly
     * apart so the base class' time-range queries can distinguish the stores.
     */
    @BeforeAll
    public static void storeData() throws InterruptedException {
        SwitchItem item = (SwitchItem) ITEMS.get(NAME);
        item.setState(STATE1);
        beforeStore = ZonedDateTime.now();
        Thread.sleep(10);
        service.store(item);
        afterStore1 = ZonedDateTime.now();
        Thread.sleep(10);
        item.setState(STATE2);
        service.store(item);
        Thread.sleep(10);
        afterStore2 = ZonedDateTime.now();
        LOGGER.info("Created item between {} and {}", AbstractDynamoDBItem.DATEFORMATTER.format(beforeStore),
                AbstractDynamoDBItem.DATEFORMATTER.format(afterStore1));
    }

    @Override
    protected String getItemName() {
        return NAME;
    }

    @Override
    protected State getFirstItemState() {
        return STATE1;
    }

    @Override
    protected State getSecondItemState() {
        return STATE2;
    }

    @Override
    protected @Nullable State getQueryItemStateBetween() {
        return STATE_BETWEEN;
    }
}