Merge branch 'master' of https://github.com/thingsboard/thingsboard into develop/2.5-js-executor

This commit is contained in:
YevhenBondarenko 2020-04-30 16:24:28 +03:00
commit 237c288050
35 changed files with 218 additions and 152 deletions

View File

@@ -21,6 +21,8 @@ CREATE OR REPLACE PROCEDURE create_partition_ts_kv_table() LANGUAGE plpgsql AS $
BEGIN BEGIN
ALTER TABLE ts_kv ALTER TABLE ts_kv
RENAME TO ts_kv_old; RENAME TO ts_kv_old;
ALTER TABLE ts_kv_old
RENAME CONSTRAINT ts_kv_pkey TO ts_kv_pkey_old;
CREATE TABLE IF NOT EXISTS ts_kv CREATE TABLE IF NOT EXISTS ts_kv
( (
LIKE ts_kv_old LIKE ts_kv_old
@@ -32,6 +34,8 @@ BEGIN
ALTER COLUMN entity_id TYPE uuid USING entity_id::uuid; ALTER COLUMN entity_id TYPE uuid USING entity_id::uuid;
ALTER TABLE ts_kv ALTER TABLE ts_kv
ALTER COLUMN key TYPE integer USING key::integer; ALTER COLUMN key TYPE integer USING key::integer;
ALTER TABLE ts_kv
ADD CONSTRAINT ts_kv_pkey PRIMARY KEY (entity_id, key, ts);
END; END;
$$; $$;
@@ -59,33 +63,65 @@ BEGIN
END; END;
$$; $$;
-- Returns one (partition_date, from_ts, to_ts) row per time bucket needed to
-- cover every timestamp present in ts_kv_old, so that the caller (the
-- create_partitions procedure) can create a ts_kv_<partition_date> partition
-- per bucket.
--   partition_type: 'DAYS', 'MONTHS' or 'YEARS'; any other value raises.
--   partition_date: text label embedded in the partition table name.
--   from_ts / to_ts: bucket bounds in epoch milliseconds; to_ts is the start
--                    of the next bucket (used as the exclusive FOR VALUES TO bound).
CREATE OR REPLACE FUNCTION get_partitions_data(IN partition_type varchar)
RETURNS
TABLE
(
partition_date text,
from_ts bigint,
to_ts bigint
)
AS
$$
BEGIN
CASE
-- One bucket per distinct calendar day found in ts_kv_old.
WHEN partition_type = 'DAYS' THEN
RETURN QUERY SELECT day_date.day AS partition_date,
-- ts is stored in epoch millis; extract(epoch ...) yields seconds, hence * 1000.
(extract(epoch from (day_date.day)::timestamp) * 1000)::bigint AS from_ts,
(extract(epoch from (day_date.day::date + INTERVAL '1 DAY')::timestamp) *
1000)::bigint AS to_ts
FROM (SELECT DISTINCT TO_CHAR(TO_TIMESTAMP(ts / 1000), 'YYYY_MM_DD') AS day
FROM ts_kv_old) AS day_date;
-- One bucket per month; first_date is normalized to the 1st ('YYYY_MM_01'),
-- and SUBSTRING(.., 1, 7) trims the label back to 'YYYY_MM' for the table name.
WHEN partition_type = 'MONTHS' THEN
RETURN QUERY SELECT SUBSTRING(month_date.first_date, 1, 7) AS partition_date,
(extract(epoch from (month_date.first_date)::timestamp) * 1000)::bigint AS from_ts,
(extract(epoch from (month_date.first_date::date + INTERVAL '1 MONTH')::timestamp) *
1000)::bigint AS to_ts
FROM (SELECT DISTINCT TO_CHAR(TO_TIMESTAMP(ts / 1000), 'YYYY_MM_01') AS first_date
FROM ts_kv_old) AS month_date;
-- One bucket per year; normalized to Jan 1st, label trimmed to 'YYYY'.
WHEN partition_type = 'YEARS' THEN
RETURN QUERY SELECT SUBSTRING(year_date.year, 1, 4) AS partition_date,
(extract(epoch from (year_date.year)::timestamp) * 1000)::bigint AS from_ts,
(extract(epoch from (year_date.year::date + INTERVAL '1 YEAR')::timestamp) *
1000)::bigint AS to_ts
FROM (SELECT DISTINCT TO_CHAR(TO_TIMESTAMP(ts / 1000), 'YYYY_01_01') AS year FROM ts_kv_old) AS year_date;
-- Unknown partitioning property (e.g. a typo in sql.postgres.ts_key_value_partitioning).
ELSE
RAISE EXCEPTION 'Failed to parse partitioning property: % !', partition_type;
END CASE;
END;
$$ LANGUAGE plpgsql;
-- call create_partitions(); -- call create_partitions();
CREATE OR REPLACE PROCEDURE create_partitions() LANGUAGE plpgsql AS $$ CREATE OR REPLACE PROCEDURE create_partitions(IN partition_type varchar) LANGUAGE plpgsql AS $$
DECLARE DECLARE
partition_date varchar; partition_date varchar;
from_ts bigint; from_ts bigint;
to_ts bigint; to_ts bigint;
key_cursor CURSOR FOR select SUBSTRING(month_date.first_date, 1, 7) AS partition_date, partitions_cursor CURSOR FOR SELECT * FROM get_partitions_data(partition_type);
extract(epoch from (month_date.first_date)::timestamp) * 1000 as from_ts,
extract(epoch from (month_date.first_date::date + INTERVAL '1 MONTH')::timestamp) *
1000 as to_ts
FROM (SELECT DISTINCT TO_CHAR(TO_TIMESTAMP(ts / 1000), 'YYYY_MM_01') AS first_date
FROM ts_kv_old) AS month_date;
BEGIN BEGIN
OPEN key_cursor; OPEN partitions_cursor;
LOOP LOOP
FETCH key_cursor INTO partition_date, from_ts, to_ts; FETCH partitions_cursor INTO partition_date, from_ts, to_ts;
EXIT WHEN NOT FOUND; EXIT WHEN NOT FOUND;
EXECUTE 'CREATE TABLE IF NOT EXISTS ts_kv_' || partition_date || EXECUTE 'CREATE TABLE IF NOT EXISTS ts_kv_' || partition_date ||
' PARTITION OF ts_kv(PRIMARY KEY (entity_id, key, ts)) FOR VALUES FROM (' || from_ts || ' PARTITION OF ts_kv FOR VALUES FROM (' || from_ts ||
') TO (' || to_ts || ');'; ') TO (' || to_ts || ');';
RAISE NOTICE 'A partition % has been created!',CONCAT('ts_kv_', partition_date); RAISE NOTICE 'A partition % has been created!',CONCAT('ts_kv_', partition_date);
END LOOP; END LOOP;
CLOSE key_cursor; CLOSE partitions_cursor;
END; END;
$$; $$;

View File

@ -106,7 +106,8 @@ public abstract class AbstractSqlTsDatabaseUpgradeService {
Thread.sleep(2000); Thread.sleep(2000);
log.info("Successfully executed query: {}", query); log.info("Successfully executed query: {}", query);
} catch (InterruptedException | SQLException e) { } catch (InterruptedException | SQLException e) {
log.info("Failed to execute query: {} due to: {}", query, e.getMessage()); log.error("Failed to execute query: {} due to: {}", query, e.getMessage());
throw new RuntimeException("Failed to execute query:" + query + " due to: ", e);
} }
} }

View File

@ -15,6 +15,7 @@
*/ */
package org.thingsboard.server.service.install; package org.thingsboard.server.service.install;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile; import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.thingsboard.server.dao.util.PsqlDao; import org.thingsboard.server.dao.util.PsqlDao;
@ -24,9 +25,20 @@ import org.thingsboard.server.dao.util.SqlTsDao;
@SqlTsDao @SqlTsDao
@PsqlDao @PsqlDao
@Profile("install") @Profile("install")
public class PsqlTsDatabaseSchemaService extends SqlAbstractDatabaseSchemaService public class PsqlTsDatabaseSchemaService extends SqlAbstractDatabaseSchemaService implements TsDatabaseSchemaService {
implements TsDatabaseSchemaService {
@Value("${sql.postgres.ts_key_value_partitioning:MONTHS}")
private String partitionType;
public PsqlTsDatabaseSchemaService() { public PsqlTsDatabaseSchemaService() {
super("schema-ts-psql.sql", null); super("schema-ts-psql.sql", null);
} }
@Override
public void createDatabaseSchema() throws Exception {
super.createDatabaseSchema();
if (partitionType.equals("INDEFINITE")) {
executeQuery("CREATE TABLE ts_kv_indefinite PARTITION OF ts_kv DEFAULT;");
}
}
} }

View File

@ -16,6 +16,7 @@
package org.thingsboard.server.service.install; package org.thingsboard.server.service.install;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile; import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.thingsboard.server.dao.util.PsqlDao; import org.thingsboard.server.dao.util.PsqlDao;
@ -33,6 +34,9 @@ import java.sql.DriverManager;
@PsqlDao @PsqlDao
public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeService implements DatabaseTsUpgradeService { public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeService implements DatabaseTsUpgradeService {
@Value("${sql.postgres.ts_key_value_partitioning:MONTHS}")
private String partitionType;
private static final String LOAD_FUNCTIONS_SQL = "schema_update_psql_ts.sql"; private static final String LOAD_FUNCTIONS_SQL = "schema_update_psql_ts.sql";
private static final String LOAD_TTL_FUNCTIONS_SQL = "schema_update_ttl.sql"; private static final String LOAD_TTL_FUNCTIONS_SQL = "schema_update_ttl.sql";
private static final String LOAD_DROP_PARTITIONS_FUNCTIONS_SQL = "schema_update_psql_drop_partitions.sql"; private static final String LOAD_DROP_PARTITIONS_FUNCTIONS_SQL = "schema_update_psql_drop_partitions.sql";
@ -50,7 +54,6 @@ public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeSe
private static final String CALL_CREATE_PARTITION_TS_KV_TABLE = CALL_REGEX + CREATE_PARTITION_TS_KV_TABLE; private static final String CALL_CREATE_PARTITION_TS_KV_TABLE = CALL_REGEX + CREATE_PARTITION_TS_KV_TABLE;
private static final String CALL_CREATE_NEW_TS_KV_LATEST_TABLE = CALL_REGEX + CREATE_NEW_TS_KV_LATEST_TABLE; private static final String CALL_CREATE_NEW_TS_KV_LATEST_TABLE = CALL_REGEX + CREATE_NEW_TS_KV_LATEST_TABLE;
private static final String CALL_CREATE_PARTITIONS = CALL_REGEX + CREATE_PARTITIONS;
private static final String CALL_CREATE_TS_KV_DICTIONARY_TABLE = CALL_REGEX + CREATE_TS_KV_DICTIONARY_TABLE; private static final String CALL_CREATE_TS_KV_DICTIONARY_TABLE = CALL_REGEX + CREATE_TS_KV_DICTIONARY_TABLE;
private static final String CALL_INSERT_INTO_DICTIONARY = CALL_REGEX + INSERT_INTO_DICTIONARY; private static final String CALL_INSERT_INTO_DICTIONARY = CALL_REGEX + INSERT_INTO_DICTIONARY;
private static final String CALL_INSERT_INTO_TS_KV = CALL_REGEX + INSERT_INTO_TS_KV; private static final String CALL_INSERT_INTO_TS_KV = CALL_REGEX + INSERT_INTO_TS_KV;
@ -66,6 +69,7 @@ public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeSe
private static final String DROP_PROCEDURE_INSERT_INTO_DICTIONARY = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_DICTIONARY; private static final String DROP_PROCEDURE_INSERT_INTO_DICTIONARY = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_DICTIONARY;
private static final String DROP_PROCEDURE_INSERT_INTO_TS_KV = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_TS_KV; private static final String DROP_PROCEDURE_INSERT_INTO_TS_KV = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_TS_KV;
private static final String DROP_PROCEDURE_INSERT_INTO_TS_KV_LATEST = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_TS_KV_LATEST; private static final String DROP_PROCEDURE_INSERT_INTO_TS_KV_LATEST = DROP_PROCEDURE_IF_EXISTS + INSERT_INTO_TS_KV_LATEST;
private static final String DROP_FUNCTION_GET_PARTITION_DATA = "DROP FUNCTION IF EXISTS get_partitions_data;";
@Override @Override
public void upgradeDatabase(String fromVersion) throws Exception { public void upgradeDatabase(String fromVersion) throws Exception {
@ -83,7 +87,11 @@ public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeSe
loadSql(conn, LOAD_FUNCTIONS_SQL); loadSql(conn, LOAD_FUNCTIONS_SQL);
log.info("Updating timeseries schema ..."); log.info("Updating timeseries schema ...");
executeQuery(conn, CALL_CREATE_PARTITION_TS_KV_TABLE); executeQuery(conn, CALL_CREATE_PARTITION_TS_KV_TABLE);
executeQuery(conn, CALL_CREATE_PARTITIONS); if (!partitionType.equals("INDEFINITE")) {
executeQuery(conn, "call create_partitions('" + partitionType + "')");
} else {
executeQuery(conn, "CREATE TABLE IF NOT EXISTS ts_kv_indefinite PARTITION OF ts_kv DEFAULT;");
}
executeQuery(conn, CALL_CREATE_TS_KV_DICTIONARY_TABLE); executeQuery(conn, CALL_CREATE_TS_KV_DICTIONARY_TABLE);
executeQuery(conn, CALL_INSERT_INTO_DICTIONARY); executeQuery(conn, CALL_INSERT_INTO_DICTIONARY);
executeQuery(conn, CALL_INSERT_INTO_TS_KV); executeQuery(conn, CALL_INSERT_INTO_TS_KV);
@ -100,9 +108,14 @@ public class PsqlTsDatabaseUpgradeService extends AbstractSqlTsDatabaseUpgradeSe
executeQuery(conn, DROP_PROCEDURE_INSERT_INTO_TS_KV); executeQuery(conn, DROP_PROCEDURE_INSERT_INTO_TS_KV);
executeQuery(conn, DROP_PROCEDURE_CREATE_NEW_TS_KV_LATEST_TABLE); executeQuery(conn, DROP_PROCEDURE_CREATE_NEW_TS_KV_LATEST_TABLE);
executeQuery(conn, DROP_PROCEDURE_INSERT_INTO_TS_KV_LATEST); executeQuery(conn, DROP_PROCEDURE_INSERT_INTO_TS_KV_LATEST);
executeQuery(conn, DROP_PROCEDURE_INSERT_INTO_TS_KV_LATEST);
executeQuery(conn, DROP_FUNCTION_GET_PARTITION_DATA);
executeQuery(conn, "ALTER TABLE ts_kv ADD COLUMN IF NOT EXISTS json_v json;"); executeQuery(conn, "ALTER TABLE ts_kv ADD COLUMN IF NOT EXISTS json_v json;");
executeQuery(conn, "ALTER TABLE ts_kv_latest ADD COLUMN IF NOT EXISTS json_v json;"); executeQuery(conn, "ALTER TABLE ts_kv_latest ADD COLUMN IF NOT EXISTS json_v json;");
} else {
executeQuery(conn, "ALTER TABLE ts_kv DROP CONSTRAINT IF EXISTS ts_kv_pkey;");
executeQuery(conn, "ALTER TABLE ts_kv ADD CONSTRAINT ts_kv_pkey PRIMARY KEY (entity_id, key, ts);");
} }
log.info("Load TTL functions ..."); log.info("Load TTL functions ...");

View File

@ -25,6 +25,7 @@ import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.sql.Connection; import java.sql.Connection;
import java.sql.DriverManager; import java.sql.DriverManager;
import java.sql.SQLException;
@Slf4j @Slf4j
public abstract class SqlAbstractDatabaseSchemaService implements DatabaseSchemaService { public abstract class SqlAbstractDatabaseSchemaService implements DatabaseSchemaService {
@ -73,4 +74,14 @@ public abstract class SqlAbstractDatabaseSchemaService implements DatabaseSchema
} }
} }
protected void executeQuery(String query) {
try (Connection conn = DriverManager.getConnection(dbUrl, dbUserName, dbPassword)) {
conn.createStatement().execute(query); //NOSONAR, ignoring because method used to execute thingsboard database upgrade script
log.info("Successfully executed query: {}", query);
Thread.sleep(5000);
} catch (InterruptedException | SQLException e) {
log.info("Failed to execute query: {} due to: {}", query, e.getMessage());
}
}
} }

View File

@ -33,14 +33,6 @@ import java.sql.SQLException;
@Slf4j @Slf4j
public class TimescaleTsDatabaseSchemaService extends SqlAbstractDatabaseSchemaService implements TsDatabaseSchemaService { public class TimescaleTsDatabaseSchemaService extends SqlAbstractDatabaseSchemaService implements TsDatabaseSchemaService {
private static final String QUERY = "query: {}";
private static final String SUCCESSFULLY_EXECUTED = "Successfully executed ";
private static final String FAILED_TO_EXECUTE = "Failed to execute ";
private static final String FAILED_DUE_TO = " due to: {}";
private static final String SUCCESSFULLY_EXECUTED_QUERY = SUCCESSFULLY_EXECUTED + QUERY;
private static final String FAILED_TO_EXECUTE_QUERY = FAILED_TO_EXECUTE + QUERY + FAILED_DUE_TO;
@Value("${sql.timescale.chunk_time_interval:86400000}") @Value("${sql.timescale.chunk_time_interval:86400000}")
private long chunkTimeInterval; private long chunkTimeInterval;
@ -54,15 +46,4 @@ public class TimescaleTsDatabaseSchemaService extends SqlAbstractDatabaseSchemaS
executeQuery("SELECT create_hypertable('ts_kv', 'ts', chunk_time_interval => " + chunkTimeInterval + ", if_not_exists => true);"); executeQuery("SELECT create_hypertable('ts_kv', 'ts', chunk_time_interval => " + chunkTimeInterval + ", if_not_exists => true);");
} }
private void executeQuery(String query) {
try (Connection conn = DriverManager.getConnection(dbUrl, dbUserName, dbPassword)) {
conn.createStatement().execute(query); //NOSONAR, ignoring because method used to execute thingsboard database upgrade script
log.info(SUCCESSFULLY_EXECUTED_QUERY, query);
Thread.sleep(5000);
} catch (InterruptedException | SQLException e) {
log.info(FAILED_TO_EXECUTE_QUERY, query, e.getMessage());
}
}
} }

View File

@ -18,10 +18,13 @@ package org.thingsboard.server.service.script;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.thingsboard.common.util.ThingsBoardThreadFactory;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
/** /**
@ -30,9 +33,22 @@ import java.util.concurrent.atomic.AtomicInteger;
@Slf4j @Slf4j
public abstract class AbstractJsInvokeService implements JsInvokeService { public abstract class AbstractJsInvokeService implements JsInvokeService {
protected ScheduledExecutorService timeoutExecutorService;
protected Map<UUID, String> scriptIdToNameMap = new ConcurrentHashMap<>(); protected Map<UUID, String> scriptIdToNameMap = new ConcurrentHashMap<>();
protected Map<UUID, BlackListInfo> blackListedFunctions = new ConcurrentHashMap<>(); protected Map<UUID, BlackListInfo> blackListedFunctions = new ConcurrentHashMap<>();
public void init(long maxRequestsTimeout) {
if (maxRequestsTimeout > 0) {
timeoutExecutorService = Executors.newSingleThreadScheduledExecutor(ThingsBoardThreadFactory.forName("nashorn-js-timeout"));
}
}
public void stop() {
if (timeoutExecutorService != null) {
timeoutExecutorService.shutdownNow();
}
}
@Override @Override
public ListenableFuture<UUID> eval(JsScriptType scriptType, String scriptBody, String... argNames) { public ListenableFuture<UUID> eval(JsScriptType scriptType, String scriptBody, String... argNames) {
UUID scriptId = UUID.randomUUID(); UUID scriptId = UUID.randomUUID();

View File

@ -48,7 +48,6 @@ public abstract class AbstractNashornJsInvokeService extends AbstractJsInvokeSer
private NashornSandbox sandbox; private NashornSandbox sandbox;
private ScriptEngine engine; private ScriptEngine engine;
private ExecutorService monitorExecutorService; private ExecutorService monitorExecutorService;
private ScheduledExecutorService timeoutExecutorService;
private final AtomicInteger jsPushedMsgs = new AtomicInteger(0); private final AtomicInteger jsPushedMsgs = new AtomicInteger(0);
private final AtomicInteger jsInvokeMsgs = new AtomicInteger(0); private final AtomicInteger jsInvokeMsgs = new AtomicInteger(0);
@ -85,9 +84,7 @@ public abstract class AbstractNashornJsInvokeService extends AbstractJsInvokeSer
@PostConstruct @PostConstruct
public void init() { public void init() {
if (maxRequestsTimeout > 0) { super.init(maxRequestsTimeout);
timeoutExecutorService = Executors.newSingleThreadScheduledExecutor(ThingsBoardThreadFactory.forName("nashorn-js-timeout"));
}
if (useJsSandbox()) { if (useJsSandbox()) {
sandbox = NashornSandboxes.create(); sandbox = NashornSandboxes.create();
monitorExecutorService = Executors.newWorkStealingPool(getMonitorThreadPoolSize()); monitorExecutorService = Executors.newWorkStealingPool(getMonitorThreadPoolSize());
@ -104,12 +101,10 @@ public abstract class AbstractNashornJsInvokeService extends AbstractJsInvokeSer
@PreDestroy @PreDestroy
public void stop() { public void stop() {
super.stop();
if (monitorExecutorService != null) { if (monitorExecutorService != null) {
monitorExecutorService.shutdownNow(); monitorExecutorService.shutdownNow();
} }
if (timeoutExecutorService != null) {
timeoutExecutorService.shutdownNow();
}
} }
protected abstract boolean useJsSandbox(); protected abstract boolean useJsSandbox();

View File

@ -87,11 +87,13 @@ public class RemoteJsInvokeService extends AbstractJsInvokeService {
@PostConstruct @PostConstruct
public void init() { public void init() {
super.init(maxRequestsTimeout);
requestTemplate.init(); requestTemplate.init();
} }
@PreDestroy @PreDestroy
public void destroy() { public void destroy() {
super.stop();
if (requestTemplate != null) { if (requestTemplate != null) {
requestTemplate.stop(); requestTemplate.stop();
} }
@ -111,7 +113,9 @@ public class RemoteJsInvokeService extends AbstractJsInvokeService {
log.trace("Post compile request for scriptId [{}]", scriptId); log.trace("Post compile request for scriptId [{}]", scriptId);
ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper)); ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper));
if (maxRequestsTimeout > 0) {
future = Futures.withTimeout(future, maxRequestsTimeout, TimeUnit.MILLISECONDS, timeoutExecutorService);
}
kafkaPushedMsgs.incrementAndGet(); kafkaPushedMsgs.incrementAndGet();
Futures.addCallback(future, new FutureCallback<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>>() { Futures.addCallback(future, new FutureCallback<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>>() {
@Override @Override
@ -154,8 +158,8 @@ public class RemoteJsInvokeService extends AbstractJsInvokeService {
.setTimeout((int) maxRequestsTimeout) .setTimeout((int) maxRequestsTimeout)
.setScriptBody(scriptIdToBodysMap.get(scriptId)); .setScriptBody(scriptIdToBodysMap.get(scriptId));
for (int i = 0; i < args.length; i++) { for (Object arg : args) {
jsRequestBuilder.addArgs(args[i].toString()); jsRequestBuilder.addArgs(arg.toString());
} }
JsInvokeProtos.RemoteJsRequest jsRequestWrapper = JsInvokeProtos.RemoteJsRequest.newBuilder() JsInvokeProtos.RemoteJsRequest jsRequestWrapper = JsInvokeProtos.RemoteJsRequest.newBuilder()
@ -163,6 +167,9 @@ public class RemoteJsInvokeService extends AbstractJsInvokeService {
.build(); .build();
ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper)); ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper));
if (maxRequestsTimeout > 0) {
future = Futures.withTimeout(future, maxRequestsTimeout, TimeUnit.MILLISECONDS, timeoutExecutorService);
}
kafkaPushedMsgs.incrementAndGet(); kafkaPushedMsgs.incrementAndGet();
Futures.addCallback(future, new FutureCallback<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>>() { Futures.addCallback(future, new FutureCallback<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>>() {
@Override @Override
@ -203,6 +210,9 @@ public class RemoteJsInvokeService extends AbstractJsInvokeService {
.build(); .build();
ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper)); ListenableFuture<TbProtoQueueMsg<JsInvokeProtos.RemoteJsResponse>> future = requestTemplate.send(new TbProtoJsQueueMsg<>(UUID.randomUUID(), jsRequestWrapper));
if (maxRequestsTimeout > 0) {
future = Futures.withTimeout(future, maxRequestsTimeout, TimeUnit.MILLISECONDS, timeoutExecutorService);
}
JsInvokeProtos.RemoteJsResponse response = future.get().getValue(); JsInvokeProtos.RemoteJsResponse response = future.get().getValue();
JsInvokeProtos.JsReleaseResponse compilationResult = response.getReleaseResponse(); JsInvokeProtos.JsReleaseResponse compilationResult = response.getReleaseResponse();

View File

@ -33,7 +33,7 @@ public class ControllerSqlTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties"); "sql-test.properties");
@BeforeClass @BeforeClass

View File

@ -32,7 +32,7 @@ public class MqttSqlTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties"); "sql-test.properties");
@BeforeClass @BeforeClass

View File

@ -33,7 +33,7 @@ public class RuleEngineSqlTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties"); "sql-test.properties");
@BeforeClass @BeforeClass

View File

@ -34,7 +34,7 @@ public class SystemSqlTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties"); "sql-test.properties");
@BeforeClass @BeforeClass

View File

@ -54,7 +54,7 @@ public abstract class AbstractChunkedAggregationTimeseriesDao extends AbstractSq
@Autowired @Autowired
protected InsertTsRepository<TsKvEntity> insertRepository; protected InsertTsRepository<TsKvEntity> insertRepository;
protected TbSqlBlockingQueue<EntityContainer<TsKvEntity>> tsQueue; protected TbSqlBlockingQueue<TsKvEntity> tsQueue;
@PostConstruct @PostConstruct
protected void init() { protected void init() {

View File

@ -23,7 +23,6 @@ import org.thingsboard.server.common.data.id.TenantId;
import org.thingsboard.server.common.data.kv.TsKvEntry; import org.thingsboard.server.common.data.kv.TsKvEntry;
import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity; import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity;
import org.thingsboard.server.dao.sqlts.AbstractChunkedAggregationTimeseriesDao; import org.thingsboard.server.dao.sqlts.AbstractChunkedAggregationTimeseriesDao;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.timeseries.TimeseriesDao; import org.thingsboard.server.dao.timeseries.TimeseriesDao;
import org.thingsboard.server.dao.util.HsqlDao; import org.thingsboard.server.dao.util.HsqlDao;
import org.thingsboard.server.dao.util.SqlTsDao; import org.thingsboard.server.dao.util.SqlTsDao;
@ -48,7 +47,7 @@ public class JpaHsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
entity.setLongValue(tsKvEntry.getLongValue().orElse(null)); entity.setLongValue(tsKvEntry.getLongValue().orElse(null));
entity.setBooleanValue(tsKvEntry.getBooleanValue().orElse(null)); entity.setBooleanValue(tsKvEntry.getBooleanValue().orElse(null));
log.trace("Saving entity: {}", entity); log.trace("Saving entity: {}", entity);
return tsQueue.add(new EntityContainer(entity, null)); return tsQueue.add(entity);
} }
} }

View File

@ -16,12 +16,11 @@
package org.thingsboard.server.dao.sqlts.insert; package org.thingsboard.server.dao.sqlts.insert;
import org.thingsboard.server.dao.model.sql.AbstractTsKvEntity; import org.thingsboard.server.dao.model.sql.AbstractTsKvEntity;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import java.util.List; import java.util.List;
public interface InsertTsRepository<T extends AbstractTsKvEntity> { public interface InsertTsRepository<T extends AbstractTsKvEntity> {
void saveOrUpdate(List<EntityContainer<T>> entities); void saveOrUpdate(List<T> entities);
} }

View File

@ -19,7 +19,6 @@ import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity; import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository; import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository;
import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository; import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository;
import org.thingsboard.server.dao.util.HsqlDao; import org.thingsboard.server.dao.util.HsqlDao;
@ -47,12 +46,11 @@ public class HsqlInsertTsRepository extends AbstractInsertRepository implements
"VALUES (T.entity_id, T.key, T.ts, T.bool_v, T.str_v, T.long_v, T.dbl_v, T.json_v);"; "VALUES (T.entity_id, T.key, T.ts, T.bool_v, T.str_v, T.long_v, T.dbl_v, T.json_v);";
@Override @Override
public void saveOrUpdate(List<EntityContainer<TsKvEntity>> entities) { public void saveOrUpdate(List<TsKvEntity> entities) {
jdbcTemplate.batchUpdate(INSERT_OR_UPDATE, new BatchPreparedStatementSetter() { jdbcTemplate.batchUpdate(INSERT_OR_UPDATE, new BatchPreparedStatementSetter() {
@Override @Override
public void setValues(PreparedStatement ps, int i) throws SQLException { public void setValues(PreparedStatement ps, int i) throws SQLException {
EntityContainer<TsKvEntity> tsKvEntityEntityContainer = entities.get(i); TsKvEntity tsKvEntity = entities.get(i);
TsKvEntity tsKvEntity = tsKvEntityEntityContainer.getEntity();
ps.setObject(1, tsKvEntity.getEntityId()); ps.setObject(1, tsKvEntity.getEntityId());
ps.setInt(2, tsKvEntity.getKey()); ps.setInt(2, tsKvEntity.getKey());
ps.setLong(3, tsKvEntity.getTs()); ps.setLong(3, tsKvEntity.getTs());

View File

@ -20,7 +20,6 @@ import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity; import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity;
import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository; import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository; import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository;
import org.thingsboard.server.dao.util.PsqlDao; import org.thingsboard.server.dao.util.PsqlDao;
import org.thingsboard.server.dao.util.SqlTsDao; import org.thingsboard.server.dao.util.SqlTsDao;
@ -28,10 +27,7 @@ import org.thingsboard.server.dao.util.SqlTsDao;
import java.sql.PreparedStatement; import java.sql.PreparedStatement;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Types; import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
@SqlTsDao @SqlTsDao
@PsqlDao @PsqlDao
@ -39,22 +35,15 @@ import java.util.Map;
@Transactional @Transactional
public class PsqlInsertTsRepository extends AbstractInsertRepository implements InsertTsRepository<TsKvEntity> { public class PsqlInsertTsRepository extends AbstractInsertRepository implements InsertTsRepository<TsKvEntity> {
private static final String INSERT_INTO_TS_KV = "INSERT INTO ts_kv_"; private static final String INSERT_ON_CONFLICT_DO_UPDATE = "INSERT INTO ts_kv (entity_id, key, ts, bool_v, str_v, long_v, dbl_v, json_v) VALUES (?, ?, ?, ?, ?, ?, ?, cast(? AS json)) " +
private static final String VALUES_ON_CONFLICT_DO_UPDATE = " (entity_id, key, ts, bool_v, str_v, long_v, dbl_v, json_v) VALUES (?, ?, ?, ?, ?, ?, ?, cast(? AS json)) " +
"ON CONFLICT (entity_id, key, ts) DO UPDATE SET bool_v = ?, str_v = ?, long_v = ?, dbl_v = ?, json_v = cast(? AS json);"; "ON CONFLICT (entity_id, key, ts) DO UPDATE SET bool_v = ?, str_v = ?, long_v = ?, dbl_v = ?, json_v = cast(? AS json);";
@Override @Override
public void saveOrUpdate(List<EntityContainer<TsKvEntity>> entities) { public void saveOrUpdate(List<TsKvEntity> entities) {
Map<String, List<TsKvEntity>> partitionMap = new HashMap<>(); jdbcTemplate.batchUpdate(INSERT_ON_CONFLICT_DO_UPDATE, new BatchPreparedStatementSetter() {
for (EntityContainer<TsKvEntity> entityContainer : entities) {
List<TsKvEntity> tsKvEntities = partitionMap.computeIfAbsent(entityContainer.getPartitionDate(), k -> new ArrayList<>());
tsKvEntities.add(entityContainer.getEntity());
}
partitionMap.forEach((partition, entries) -> jdbcTemplate.batchUpdate(getInsertOrUpdateQuery(partition), new BatchPreparedStatementSetter() {
@Override @Override
public void setValues(PreparedStatement ps, int i) throws SQLException { public void setValues(PreparedStatement ps, int i) throws SQLException {
TsKvEntity tsKvEntity = entries.get(i); TsKvEntity tsKvEntity = entities.get(i);
ps.setObject(1, tsKvEntity.getEntityId()); ps.setObject(1, tsKvEntity.getEntityId());
ps.setInt(2, tsKvEntity.getKey()); ps.setInt(2, tsKvEntity.getKey());
ps.setLong(3, tsKvEntity.getTs()); ps.setLong(3, tsKvEntity.getTs());
@ -93,12 +82,9 @@ public class PsqlInsertTsRepository extends AbstractInsertRepository implements
@Override @Override
public int getBatchSize() { public int getBatchSize() {
return entries.size(); return entities.size();
} }
})); });
} }
private String getInsertOrUpdateQuery(String partitionDate) {
return INSERT_INTO_TS_KV + partitionDate + VALUES_ON_CONFLICT_DO_UPDATE;
}
} }

View File

@ -20,7 +20,6 @@ import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.thingsboard.server.dao.model.sqlts.timescale.ts.TimescaleTsKvEntity; import org.thingsboard.server.dao.model.sqlts.timescale.ts.TimescaleTsKvEntity;
import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository; import org.thingsboard.server.dao.sqlts.insert.AbstractInsertRepository;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository; import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository;
import org.thingsboard.server.dao.util.PsqlDao; import org.thingsboard.server.dao.util.PsqlDao;
import org.thingsboard.server.dao.util.TimescaleDBTsDao; import org.thingsboard.server.dao.util.TimescaleDBTsDao;
@ -41,11 +40,11 @@ public class TimescaleInsertTsRepository extends AbstractInsertRepository implem
"ON CONFLICT (entity_id, key, ts) DO UPDATE SET bool_v = ?, str_v = ?, long_v = ?, dbl_v = ?, json_v = cast(? AS json);"; "ON CONFLICT (entity_id, key, ts) DO UPDATE SET bool_v = ?, str_v = ?, long_v = ?, dbl_v = ?, json_v = cast(? AS json);";
@Override @Override
public void saveOrUpdate(List<EntityContainer<TimescaleTsKvEntity>> entities) { public void saveOrUpdate(List<TimescaleTsKvEntity> entities) {
jdbcTemplate.batchUpdate(INSERT_OR_UPDATE, new BatchPreparedStatementSetter() { jdbcTemplate.batchUpdate(INSERT_OR_UPDATE, new BatchPreparedStatementSetter() {
@Override @Override
public void setValues(PreparedStatement ps, int i) throws SQLException { public void setValues(PreparedStatement ps, int i) throws SQLException {
TimescaleTsKvEntity tsKvEntity = entities.get(i).getEntity(); TimescaleTsKvEntity tsKvEntity = entities.get(i);
ps.setObject(1, tsKvEntity.getEntityId()); ps.setObject(1, tsKvEntity.getEntityId());
ps.setInt(2, tsKvEntity.getKey()); ps.setInt(2, tsKvEntity.getKey());
ps.setLong(3, tsKvEntity.getTs()); ps.setLong(3, tsKvEntity.getTs());

View File

@ -25,7 +25,6 @@ import org.thingsboard.server.common.data.id.TenantId;
import org.thingsboard.server.common.data.kv.TsKvEntry; import org.thingsboard.server.common.data.kv.TsKvEntry;
import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity; import org.thingsboard.server.dao.model.sqlts.ts.TsKvEntity;
import org.thingsboard.server.dao.sqlts.AbstractChunkedAggregationTimeseriesDao; import org.thingsboard.server.dao.sqlts.AbstractChunkedAggregationTimeseriesDao;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.sqlts.insert.psql.PsqlPartitioningRepository; import org.thingsboard.server.dao.sqlts.insert.psql.PsqlPartitioningRepository;
import org.thingsboard.server.dao.timeseries.PsqlPartition; import org.thingsboard.server.dao.timeseries.PsqlPartition;
import org.thingsboard.server.dao.timeseries.SqlTsPartitionDate; import org.thingsboard.server.dao.timeseries.SqlTsPartitionDate;
@ -42,8 +41,6 @@ import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
import static org.thingsboard.server.dao.timeseries.SqlTsPartitionDate.EPOCH_START;
@Component @Component
@Slf4j @Slf4j
@ -58,7 +55,6 @@ public class JpaPsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
private PsqlPartitioningRepository partitioningRepository; private PsqlPartitioningRepository partitioningRepository;
private SqlTsPartitionDate tsFormat; private SqlTsPartitionDate tsFormat;
private PsqlPartition indefinitePartition;
@Value("${sql.postgres.ts_key_value_partitioning:MONTHS}") @Value("${sql.postgres.ts_key_value_partitioning:MONTHS}")
private String partitioning; private String partitioning;
@ -69,10 +65,6 @@ public class JpaPsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
Optional<SqlTsPartitionDate> partition = SqlTsPartitionDate.parse(partitioning); Optional<SqlTsPartitionDate> partition = SqlTsPartitionDate.parse(partitioning);
if (partition.isPresent()) { if (partition.isPresent()) {
tsFormat = partition.get(); tsFormat = partition.get();
if (tsFormat.equals(SqlTsPartitionDate.INDEFINITE)) {
indefinitePartition = new PsqlPartition(toMills(EPOCH_START), Long.MAX_VALUE, tsFormat.getPattern());
savePartition(indefinitePartition);
}
} else { } else {
log.warn("Incorrect configuration of partitioning {}", partitioning); log.warn("Incorrect configuration of partitioning {}", partitioning);
throw new RuntimeException("Failed to parse partitioning property: " + partitioning + "!"); throw new RuntimeException("Failed to parse partitioning property: " + partitioning + "!");
@ -81,6 +73,7 @@ public class JpaPsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
@Override @Override
public ListenableFuture<Void> save(TenantId tenantId, EntityId entityId, TsKvEntry tsKvEntry, long ttl) { public ListenableFuture<Void> save(TenantId tenantId, EntityId entityId, TsKvEntry tsKvEntry, long ttl) {
savePartitionIfNotExist(tsKvEntry.getTs());
String strKey = tsKvEntry.getKey(); String strKey = tsKvEntry.getKey();
Integer keyId = getOrSaveKeyId(strKey); Integer keyId = getOrSaveKeyId(strKey);
TsKvEntity entity = new TsKvEntity(); TsKvEntity entity = new TsKvEntity();
@ -92,9 +85,23 @@ public class JpaPsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
entity.setLongValue(tsKvEntry.getLongValue().orElse(null)); entity.setLongValue(tsKvEntry.getLongValue().orElse(null));
entity.setBooleanValue(tsKvEntry.getBooleanValue().orElse(null)); entity.setBooleanValue(tsKvEntry.getBooleanValue().orElse(null));
entity.setJsonValue(tsKvEntry.getJsonValue().orElse(null)); entity.setJsonValue(tsKvEntry.getJsonValue().orElse(null));
PsqlPartition psqlPartition = toPartition(tsKvEntry.getTs());
log.trace("Saving entity: {}", entity); log.trace("Saving entity: {}", entity);
return tsQueue.add(new EntityContainer(entity, psqlPartition.getPartitionDate())); return tsQueue.add(entity);
}
private void savePartitionIfNotExist(long ts) {
if (!tsFormat.equals(SqlTsPartitionDate.INDEFINITE)) {
LocalDateTime time = LocalDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC);
LocalDateTime localDateTimeStart = tsFormat.trancateTo(time);
long partitionStartTs = toMills(localDateTimeStart);
if (partitions.get(partitionStartTs) == null) {
LocalDateTime localDateTimeEnd = tsFormat.plusTo(localDateTimeStart);
long partitionEndTs = toMills(localDateTimeEnd);
ZonedDateTime zonedDateTime = localDateTimeStart.atZone(ZoneOffset.UTC);
String partitionDate = zonedDateTime.format(DateTimeFormatter.ofPattern(tsFormat.getPattern()));
savePartition(new PsqlPartition(partitionStartTs, partitionEndTs, partitionDate));
}
}
} }
private void savePartition(PsqlPartition psqlPartition) { private void savePartition(PsqlPartition psqlPartition) {
@ -111,28 +118,6 @@ public class JpaPsqlTimeseriesDao extends AbstractChunkedAggregationTimeseriesDa
} }
} }
private PsqlPartition toPartition(long ts) {
if (tsFormat.equals(SqlTsPartitionDate.INDEFINITE)) {
return indefinitePartition;
} else {
LocalDateTime time = LocalDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC);
LocalDateTime localDateTimeStart = tsFormat.trancateTo(time);
long partitionStartTs = toMills(localDateTimeStart);
PsqlPartition partition = partitions.get(partitionStartTs);
if (partition != null) {
return partition;
} else {
LocalDateTime localDateTimeEnd = tsFormat.plusTo(localDateTimeStart);
long partitionEndTs = toMills(localDateTimeEnd);
ZonedDateTime zonedDateTime = localDateTimeStart.atZone(ZoneOffset.UTC);
String partitionDate = zonedDateTime.format(DateTimeFormatter.ofPattern(tsFormat.getPattern()));
partition = new PsqlPartition(partitionStartTs, partitionEndTs, partitionDate);
savePartition(partition);
return partition;
}
}
}
private static long toMills(LocalDateTime time) { private static long toMills(LocalDateTime time) {
return time.toInstant(ZoneOffset.UTC).toEpochMilli(); return time.toInstant(ZoneOffset.UTC).toEpochMilli();
} }

View File

@ -36,7 +36,6 @@ import org.thingsboard.server.dao.model.sqlts.timescale.ts.TimescaleTsKvEntity;
import org.thingsboard.server.dao.sql.TbSqlBlockingQueue; import org.thingsboard.server.dao.sql.TbSqlBlockingQueue;
import org.thingsboard.server.dao.sql.TbSqlBlockingQueueParams; import org.thingsboard.server.dao.sql.TbSqlBlockingQueueParams;
import org.thingsboard.server.dao.sqlts.AbstractSqlTimeseriesDao; import org.thingsboard.server.dao.sqlts.AbstractSqlTimeseriesDao;
import org.thingsboard.server.dao.sqlts.EntityContainer;
import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository; import org.thingsboard.server.dao.sqlts.insert.InsertTsRepository;
import org.thingsboard.server.dao.timeseries.TimeseriesDao; import org.thingsboard.server.dao.timeseries.TimeseriesDao;
import org.thingsboard.server.dao.util.TimescaleDBTsDao; import org.thingsboard.server.dao.util.TimescaleDBTsDao;
@ -64,7 +63,7 @@ public class TimescaleTimeseriesDao extends AbstractSqlTimeseriesDao implements
@Autowired @Autowired
protected InsertTsRepository<TimescaleTsKvEntity> insertRepository; protected InsertTsRepository<TimescaleTsKvEntity> insertRepository;
protected TbSqlBlockingQueue<EntityContainer<TimescaleTsKvEntity>> tsQueue; protected TbSqlBlockingQueue<TimescaleTsKvEntity> tsQueue;
@PostConstruct @PostConstruct
protected void init() { protected void init() {
@ -175,7 +174,7 @@ public class TimescaleTimeseriesDao extends AbstractSqlTimeseriesDao implements
entity.setJsonValue(tsKvEntry.getJsonValue().orElse(null)); entity.setJsonValue(tsKvEntry.getJsonValue().orElse(null));
log.trace("Saving entity to timescale db: {}", entity); log.trace("Saving entity to timescale db: {}", entity);
return tsQueue.add(new EntityContainer(entity, null)); return tsQueue.add(entity);
} }
@Override @Override

View File

@ -35,6 +35,6 @@ public class PsqlPartition {
} }
private String createStatement(long start, long end, String partitionDate) { private String createStatement(long start, long end, String partitionDate) {
return "CREATE TABLE IF NOT EXISTS " + TABLE_REGEX + partitionDate + " PARTITION OF ts_kv(PRIMARY KEY (entity_id, key, ts)) FOR VALUES FROM (" + start + ") TO (" + end + ")"; return "CREATE TABLE IF NOT EXISTS " + TABLE_REGEX + partitionDate + " PARTITION OF ts_kv FOR VALUES FROM (" + start + ") TO (" + end + ")";
} }
} }

View File

@ -23,7 +23,7 @@ import java.util.Optional;
public enum SqlTsPartitionDate { public enum SqlTsPartitionDate {
MINUTES("yyyy_MM_dd_HH_mm", ChronoUnit.MINUTES), HOURS("yyyy_MM_dd_HH", ChronoUnit.HOURS), DAYS("yyyy_MM_dd", ChronoUnit.DAYS), MONTHS("yyyy_MM", ChronoUnit.MONTHS), YEARS("yyyy", ChronoUnit.YEARS), INDEFINITE("indefinite", ChronoUnit.FOREVER); DAYS("yyyy_MM_dd", ChronoUnit.DAYS), MONTHS("yyyy_MM", ChronoUnit.MONTHS), YEARS("yyyy", ChronoUnit.YEARS), INDEFINITE("indefinite", ChronoUnit.FOREVER);
private final String pattern; private final String pattern;
private final transient TemporalUnit truncateUnit; private final transient TemporalUnit truncateUnit;
@ -44,10 +44,6 @@ public enum SqlTsPartitionDate {
public LocalDateTime trancateTo(LocalDateTime time) { public LocalDateTime trancateTo(LocalDateTime time) {
switch (this) { switch (this) {
case MINUTES:
return time.truncatedTo(ChronoUnit.MINUTES);
case HOURS:
return time.truncatedTo(ChronoUnit.HOURS);
case DAYS: case DAYS:
return time.truncatedTo(ChronoUnit.DAYS); return time.truncatedTo(ChronoUnit.DAYS);
case MONTHS: case MONTHS:
@ -63,10 +59,6 @@ public enum SqlTsPartitionDate {
public LocalDateTime plusTo(LocalDateTime time) { public LocalDateTime plusTo(LocalDateTime time) {
switch (this) { switch (this) {
case MINUTES:
return time.plusMinutes(1);
case HOURS:
return time.plusHours(1);
case DAYS: case DAYS:
return time.plusDays(1); return time.plusDays(1);
case MONTHS: case MONTHS:

View File

@ -23,7 +23,8 @@ CREATE TABLE IF NOT EXISTS ts_kv
str_v varchar(10000000), str_v varchar(10000000),
long_v bigint, long_v bigint,
dbl_v double precision, dbl_v double precision,
json_v json json_v json,
CONSTRAINT ts_kv_pkey PRIMARY KEY (entity_id, key, ts)
) PARTITION BY RANGE (ts); ) PARTITION BY RANGE (ts);
CREATE TABLE IF NOT EXISTS ts_kv_latest CREATE TABLE IF NOT EXISTS ts_kv_latest

View File

@ -31,14 +31,14 @@ public class JpaDaoTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/system-data.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties" "sql-test.properties"
); );
// @ClassRule // @ClassRule
// public static CustomSqlUnit sqlUnit = new CustomSqlUnit( // public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
// Arrays.asList("sql/schema-ts-psql.sql", "sql/schema-entities.sql", "sql/system-data.sql"), // Arrays.asList("sql/schema-ts-psql.sql", "sql/schema-entities.sql", "sql/system-data.sql"),
// "sql/drop-all-tables.sql", // "sql/psql/drop-all-tables.sql",
// "sql-test.properties" // "sql-test.properties"
// ); // );

View File

@ -31,14 +31,14 @@ public class SqlDaoServiceTestSuite {
@ClassRule @ClassRule
public static CustomSqlUnit sqlUnit = new CustomSqlUnit( public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql", "sql/system-test.sql"), Arrays.asList("sql/schema-ts-hsql.sql", "sql/schema-entities-hsql.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql", "sql/system-test.sql"),
"sql/drop-all-tables.sql", "sql/hsql/drop-all-tables.sql",
"sql-test.properties" "sql-test.properties"
); );
// @ClassRule // @ClassRule
// public static CustomSqlUnit sqlUnit = new CustomSqlUnit( // public static CustomSqlUnit sqlUnit = new CustomSqlUnit(
// Arrays.asList("sql/schema-ts-psql.sql", "sql/schema-entities.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql", "sql/system-test.sql"), // Arrays.asList("sql/schema-ts-psql.sql", "sql/schema-entities.sql", "sql/schema-entities-idx.sql", "sql/system-data.sql", "sql/system-test.sql"),
// "sql/drop-all-tables.sql", // "sql/psql/drop-all-tables.sql",
// "sql-test.properties" // "sql-test.properties"
// ); // );

View File

@ -0,0 +1,24 @@
DROP TABLE IF EXISTS admin_settings;
DROP TABLE IF EXISTS alarm;
DROP TABLE IF EXISTS asset;
DROP TABLE IF EXISTS audit_log;
DROP TABLE IF EXISTS attribute_kv;
DROP TABLE IF EXISTS component_descriptor;
DROP TABLE IF EXISTS customer;
DROP TABLE IF EXISTS dashboard;
DROP TABLE IF EXISTS device;
DROP TABLE IF EXISTS device_credentials;
DROP TABLE IF EXISTS event;
DROP TABLE IF EXISTS relation;
DROP TABLE IF EXISTS tb_user;
DROP TABLE IF EXISTS tenant;
DROP TABLE IF EXISTS ts_kv;
DROP TABLE IF EXISTS ts_kv_latest;
DROP TABLE IF EXISTS ts_kv_dictionary;
DROP TABLE IF EXISTS user_credentials;
DROP TABLE IF EXISTS widget_type;
DROP TABLE IF EXISTS widgets_bundle;
DROP TABLE IF EXISTS rule_node;
DROP TABLE IF EXISTS rule_chain;
DROP TABLE IF EXISTS entity_view;
DROP TABLE IF EXISTS tb_schema_settings;

View File

@ -14,9 +14,11 @@ DROP TABLE IF EXISTS tb_user;
DROP TABLE IF EXISTS tenant; DROP TABLE IF EXISTS tenant;
DROP TABLE IF EXISTS ts_kv; DROP TABLE IF EXISTS ts_kv;
DROP TABLE IF EXISTS ts_kv_latest; DROP TABLE IF EXISTS ts_kv_latest;
DROP TABLE IF EXISTS ts_kv_dictionary;
DROP TABLE IF EXISTS user_credentials; DROP TABLE IF EXISTS user_credentials;
DROP TABLE IF EXISTS widget_type; DROP TABLE IF EXISTS widget_type;
DROP TABLE IF EXISTS widgets_bundle; DROP TABLE IF EXISTS widgets_bundle;
DROP TABLE IF EXISTS rule_node; DROP TABLE IF EXISTS rule_node;
DROP TABLE IF EXISTS rule_chain; DROP TABLE IF EXISTS rule_chain;
DROP TABLE IF EXISTS entity_view; DROP TABLE IF EXISTS entity_view;
DROP TABLE IF EXISTS tb_schema_settings;

View File

@ -59,7 +59,7 @@ JsInvokeMessageProcessor.prototype.onJsInvokeMessage = function(message) {
} else if (request.releaseRequest) { } else if (request.releaseRequest) {
this.processReleaseRequest(requestId, responseTopic, headers, request.releaseRequest); this.processReleaseRequest(requestId, responseTopic, headers, request.releaseRequest);
} else { } else {
logger.error('[%s] Unknown request recevied!', requestId); logger.error('[%s] Unknown request received!', requestId);
} }
} catch (err) { } catch (err) {

View File

@ -41,8 +41,6 @@ function KafkaProducer() {
} }
} }
let headersData = headers.data;
headersData = Object.fromEntries(Object.entries(headersData).map(([key, value]) => [key, Buffer.from(value)]));
return producer.send( return producer.send(
{ {
topic: responseTopic, topic: responseTopic,
@ -50,7 +48,7 @@ function KafkaProducer() {
{ {
key: scriptId, key: scriptId,
value: rawResponse, value: rawResponse,
headers: headersData headers: headers.data
} }
] ]
}); });
@ -96,15 +94,10 @@ function KafkaProducer() {
eachMessage: async ({topic, partition, message}) => { eachMessage: async ({topic, partition, message}) => {
let headers = message.headers; let headers = message.headers;
let key = message.key; let key = message.key;
let data = message.value;
let msg = {}; let msg = {};
headers = Object.fromEntries(
Object.entries(headers).map(([key, value]) => [key, [...value]]));
msg.key = key.toString('utf8'); msg.key = key.toString('utf8');
msg.data = [...data]; msg.data = message.value;
msg.headers = {data: headers} msg.headers = {data: headers};
messageProcessor.onJsInvokeMessage(msg); messageProcessor.onJsInvokeMessage(msg);
}, },
}); });

View File

@ -38,7 +38,7 @@ import java.io.IOException;
@Slf4j @Slf4j
@RuleNode( @RuleNode(
type = ComponentType.FILTER, type = ComponentType.FILTER,
name = "checks alarm status", name = "check alarm status",
configClazz = TbCheckAlarmStatusNodeConfig.class, configClazz = TbCheckAlarmStatusNodeConfig.class,
relationTypes = {"True", "False"}, relationTypes = {"True", "False"},
nodeDescription = "Checks alarm status.", nodeDescription = "Checks alarm status.",

View File

@ -386,6 +386,26 @@ function UserService($http, $q, $rootScope, adminService, dashboardService, time
deferred.reject(); deferred.reject();
} }
procceedJwtTokenValidate(); procceedJwtTokenValidate();
} else if (locationSearch.username && locationSearch.password) {
var user = {};
user.name = locationSearch.username;
user.password = locationSearch.password;
$location.search('username', null);
$location.search('password', null);
loginService.login(user).then(function success(response) {
var token = response.data.token;
var refreshToken = response.data.refreshToken;
try {
updateAndValidateToken(token, 'jwt_token', false);
updateAndValidateToken(refreshToken, 'refresh_token', false);
} catch (e) {
deferred.reject();
}
procceedJwtTokenValidate();
}, function fail() {
deferred.reject();
});
} else { } else {
procceedJwtTokenValidate(); procceedJwtTokenValidate();
} }

View File

@ -20,7 +20,7 @@ import logoSvg from '../../svg/logo_title_white.svg';
/* eslint-enable import/no-unresolved, import/default */ /* eslint-enable import/no-unresolved, import/default */
/*@ngInject*/ /*@ngInject*/
export default function LoginController(toast, loginService, userService, types, $state, $stateParams/*, $rootScope, $log, $translate*/) { export default function LoginController(toast, loginService, userService, types, $state/*, $rootScope, $log, $translate*/) {
var vm = this; var vm = this;
vm.logoSvg = logoSvg; vm.logoSvg = logoSvg;
@ -32,12 +32,6 @@ export default function LoginController(toast, loginService, userService, types,
vm.login = login; vm.login = login;
if ($stateParams.username && $stateParams.password) {
vm.user.name = $stateParams.username;
vm.user.password = $stateParams.password;
doLogin();
}
function doLogin() { function doLogin() {
loginService.login(vm.user).then(function success(response) { loginService.login(vm.user).then(function success(response) {
var token = response.data.token; var token = response.data.token;

View File

@ -25,7 +25,7 @@ import createPasswordTemplate from './create-password.tpl.html';
/*@ngInject*/ /*@ngInject*/
export default function LoginRoutes($stateProvider) { export default function LoginRoutes($stateProvider) {
$stateProvider.state('login', { $stateProvider.state('login', {
url: '/login?username&password', url: '/login',
module: 'public', module: 'public',
views: { views: {
"@": { "@": {