Merge branch 'develop/3.4' into refactoring_tests_08
commit eb50e96f3e
@@ -116,8 +116,13 @@ public abstract class AbstractBufferedRateExecutor<T extends AsyncTask, F extend
         F result = wrap(task, settableFuture);
 
         boolean perTenantLimitReached = false;
-        var tenantProfileConfiguration = tenantProfileCache.get(task.getTenantId()).getDefaultProfileConfiguration();
-        if (StringUtils.isNotEmpty(tenantProfileConfiguration.getCassandraQueryTenantRateLimitsConfiguration())) {
+        var tenantProfileConfiguration =
+                (task.getTenantId() != null && !TenantId.SYS_TENANT_ID.equals(task.getTenantId()))
+                        ? tenantProfileCache.get(task.getTenantId()).getDefaultProfileConfiguration()
+                        : null;
+
+        if (tenantProfileConfiguration != null &&
+                StringUtils.isNotEmpty(tenantProfileConfiguration.getCassandraQueryTenantRateLimitsConfiguration())) {
             if (task.getTenantId() == null) {
                 log.info("Invalid task received: {}", task);
             } else if (!task.getTenantId().isNullUid()) {
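Note on the hunk above: the per-tenant Cassandra rate-limit lookup is now guarded, so tasks with no tenant or with the system tenant skip the tenant profile cache entirely instead of failing inside it. A minimal sketch of the resulting decision, using a hypothetical helper name (only the identifiers that appear in the diff are taken from the code):

    // Illustrative only; mirrors the guarded lookup above, not part of the actual class.
    // Returns the per-tenant Cassandra rate-limit configuration, or null when none applies.
    private String resolveCassandraRateLimitsConfig(T task) {
        TenantId tenantId = task.getTenantId();
        if (tenantId == null || TenantId.SYS_TENANT_ID.equals(tenantId)) {
            return null; // tenant-less and system-tenant tasks bypass per-tenant rate limiting
        }
        return tenantProfileCache.get(tenantId)
                .getDefaultProfileConfiguration()
                .getCassandraQueryTenantRateLimitsConfiguration();
    }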
docker/docker-compose.cassandra.volumes.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
+#
+# Copyright © 2016-2022 The Thingsboard Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+version: '2.2'
+
+services:
+  cassandra:
+    volumes:
+      - cassandra-volume:/var/lib/cassandra
+
+volumes:
+  cassandra-volume:
+    external: true
+    name: ${CASSANDRA_DATA_VOLUME}
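Because the volume is declared external: true, docker-compose will not create it on its own; it has to exist before "up" runs, and its name is resolved from the CASSANDRA_DATA_VOLUME environment variable. The ThingsBoardDbInstaller changes further down provide both pieces: they create the volume ("volume create ...") and export the variable, in both cases only when hybrid mode is enabled.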
@@ -29,7 +29,7 @@ services:
       - ./tb-node/postgres:/var/lib/postgresql/data
   cassandra:
     restart: always
-    image: "cassandra:3.11.3"
+    image: "cassandra:4.0.4"
     ports:
       - "9042"
     volumes:
@@ -26,5 +26,9 @@ As result, in REPOSITORY column, next images should be present:
 
         mvn clean install -DblackBoxTests.skip=false -DblackBoxTests.redisCluster=true
 
+- Run the black box tests in the [msa/black-box-tests](../black-box-tests) directory in Hybrid mode (postgres + cassandra):
+
+        mvn clean install -DblackBoxTests.skip=false -DblackBoxTests.hybridMode=true
+
 
 
@@ -27,6 +27,9 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 import java.util.UUID;
 
 import static org.hamcrest.CoreMatchers.containsString;
@@ -40,6 +43,7 @@ import static org.junit.Assert.fail;
 @Slf4j
 public class ContainerTestSuite {
     final static boolean IS_REDIS_CLUSTER = Boolean.parseBoolean(System.getProperty("blackBoxTests.redisCluster"));
+    final static boolean IS_HYBRID_MODE = Boolean.parseBoolean(System.getProperty("blackBoxTests.hybridMode"));
     private static final String SOURCE_DIR = "./../../docker/";
     private static final String TB_CORE_LOG_REGEXP = ".*Starting polling for events.*";
     private static final String TRANSPORTS_LOG_REGEXP = ".*Going to recalculate partitions.*";
@@ -53,6 +57,7 @@ public class ContainerTestSuite {
     public static DockerComposeContainer getTestContainer() {
         if (testContainer == null) {
             log.info("System property of blackBoxTests.redisCluster is {}", IS_REDIS_CLUSTER);
+            log.info("System property of blackBoxTests.hybridMode is {}", IS_HYBRID_MODE);
             boolean skipTailChildContainers = Boolean.valueOf(System.getProperty("blackBoxTests.skipTailChildContainers"));
             try {
                 final String targetDir = FileUtils.getTempDirectoryPath() + "/" + "ContainerTestSuite-" + UUID.randomUUID() + "/";
@@ -61,7 +66,7 @@ public class ContainerTestSuite {
                 replaceInFile(targetDir + "docker-compose.yml", "  container_name: \"${LOAD_BALANCER_NAME}\"", "", "container_name");
 
                 class DockerComposeContainerImpl<SELF extends DockerComposeContainer<SELF>> extends DockerComposeContainer<SELF> {
-                    public DockerComposeContainerImpl(File... composeFiles) {
+                    public DockerComposeContainerImpl(List<File> composeFiles) {
                         super(composeFiles);
                     }
 
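Switching the wrapper's constructor from File... to List<File> is what allows the compose file set to be assembled conditionally before the container is built; DockerComposeContainer accepts a List<File> directly, as the unchanged super(composeFiles) call shows.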
@@ -72,10 +77,11 @@ public class ContainerTestSuite {
                     }
                 }
 
-                testContainer = new DockerComposeContainerImpl<>(
-                        new File(targetDir + "docker-compose.yml"),
+                List<File> composeFiles = new ArrayList<>(Arrays.asList(new File(targetDir + "docker-compose.yml"),
                         new File(targetDir + "docker-compose.volumes.yml"),
-                        new File(targetDir + "docker-compose.postgres.yml"),
+                        IS_HYBRID_MODE
+                                ? new File(targetDir + "docker-compose.hybrid.yml")
+                                : new File(targetDir + "docker-compose.postgres.yml"),
                         new File(targetDir + "docker-compose.postgres.volumes.yml"),
                         new File(targetDir + "docker-compose.kafka.yml"),
                         IS_REDIS_CLUSTER
@@ -83,8 +89,13 @@ public class ContainerTestSuite {
                                 : new File("./../../docker/docker-compose.redis.yml"),
                         IS_REDIS_CLUSTER
                                 ? new File("./../../docker/docker-compose.redis-cluster.volumes.yml")
-                                : new File("./../../docker/docker-compose.redis.volumes.yml")
-                )
+                                : new File("./../../docker/docker-compose.redis.volumes.yml")));
+
+                if (IS_HYBRID_MODE) {
+                    composeFiles.add(new File(targetDir + "docker-compose.cassandra.volumes.yml"));
+                }
+
+                testContainer = new DockerComposeContainerImpl<>(composeFiles)
                         .withPull(false)
                         .withLocalCompose(true)
                         .withTailChildContainers(!skipTailChildContainers)
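Read together with the previous hunk, the suite now collects the compose descriptors into a mutable list and only then constructs the container. A condensed sketch of the selection logic (redis descriptors elided for brevity; identifiers as in the diff):

    List<File> composeFiles = new ArrayList<>(Arrays.asList(
            new File(targetDir + "docker-compose.yml"),
            new File(targetDir + "docker-compose.volumes.yml"),
            // hybrid mode swaps the postgres-only descriptor for the postgres + cassandra one
            IS_HYBRID_MODE
                    ? new File(targetDir + "docker-compose.hybrid.yml")
                    : new File(targetDir + "docker-compose.postgres.yml"),
            new File(targetDir + "docker-compose.postgres.volumes.yml"),
            new File(targetDir + "docker-compose.kafka.yml")
            /* redis descriptors omitted */));
    if (IS_HYBRID_MODE) {
        // only mounted when cassandra is actually part of the deployment
        composeFiles.add(new File(targetDir + "docker-compose.cassandra.volumes.yml"));
    }
    testContainer = new DockerComposeContainerImpl<>(composeFiles)
            .withPull(false)
            .withLocalCompose(true)
            .withTailChildContainers(!skipTailChildContainers);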
@@ -21,10 +21,7 @@ import org.junit.rules.ExternalResource;
 import org.testcontainers.utility.Base58;
 
 import java.io.File;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
@@ -32,7 +29,10 @@ import java.util.stream.IntStream;
 public class ThingsBoardDbInstaller extends ExternalResource {
 
     final static boolean IS_REDIS_CLUSTER = Boolean.parseBoolean(System.getProperty("blackBoxTests.redisCluster"));
+    final static boolean IS_HYBRID_MODE = Boolean.parseBoolean(System.getProperty("blackBoxTests.hybridMode"));
     private final static String POSTGRES_DATA_VOLUME = "tb-postgres-test-data-volume";
+
+    private final static String CASSANDRA_DATA_VOLUME = "tb-cassandra-test-data-volume";
     private final static String REDIS_DATA_VOLUME = "tb-redis-data-volume";
     private final static String REDIS_CLUSTER_DATA_VOLUME = "tb-redis-cluster-data-volume";
     private final static String TB_LOG_VOLUME = "tb-log-test-volume";
@@ -46,6 +46,7 @@ public class ThingsBoardDbInstaller extends ExternalResource {
     private final DockerComposeExecutor dockerCompose;
 
     private final String postgresDataVolume;
+    private final String cassandraDataVolume;
 
     private final String redisDataVolume;
     private final String redisClusterDataVolume;
@@ -60,10 +61,13 @@ public class ThingsBoardDbInstaller extends ExternalResource {
 
     public ThingsBoardDbInstaller() {
         log.info("System property of blackBoxTests.redisCluster is {}", IS_REDIS_CLUSTER);
-        List<File> composeFiles = Arrays.asList(
+        log.info("System property of blackBoxTests.hybridMode is {}", IS_HYBRID_MODE);
+        List<File> composeFiles = new ArrayList<>(Arrays.asList(
                 new File("./../../docker/docker-compose.yml"),
                 new File("./../../docker/docker-compose.volumes.yml"),
-                new File("./../../docker/docker-compose.postgres.yml"),
+                IS_HYBRID_MODE
+                        ? new File("./../../docker/docker-compose.hybrid.yml")
+                        : new File("./../../docker/docker-compose.postgres.yml"),
                 new File("./../../docker/docker-compose.postgres.volumes.yml"),
                 IS_REDIS_CLUSTER
                         ? new File("./../../docker/docker-compose.redis-cluster.yml")
@@ -71,12 +75,16 @@ public class ThingsBoardDbInstaller extends ExternalResource {
                 IS_REDIS_CLUSTER
                         ? new File("./../../docker/docker-compose.redis-cluster.volumes.yml")
                         : new File("./../../docker/docker-compose.redis.volumes.yml")
-        );
+        ));
+        if (IS_HYBRID_MODE) {
+            composeFiles.add(new File("./../../docker/docker-compose.cassandra.volumes.yml"));
+        }
 
         String identifier = Base58.randomString(6).toLowerCase();
         String project = identifier + Base58.randomString(6).toLowerCase();
 
         postgresDataVolume = project + "_" + POSTGRES_DATA_VOLUME;
+        cassandraDataVolume = project + "_" + CASSANDRA_DATA_VOLUME;
         redisDataVolume = project + "_" + REDIS_DATA_VOLUME;
         redisClusterDataVolume = project + "_" + REDIS_CLUSTER_DATA_VOLUME;
         tbLogVolume = project + "_" + TB_LOG_VOLUME;
@@ -91,6 +99,9 @@ public class ThingsBoardDbInstaller extends ExternalResource {
 
         env = new HashMap<>();
         env.put("POSTGRES_DATA_VOLUME", postgresDataVolume);
+        if (IS_HYBRID_MODE) {
+            env.put("CASSANDRA_DATA_VOLUME", cassandraDataVolume);
+        }
         env.put("TB_LOG_VOLUME", tbLogVolume);
         env.put("TB_COAP_TRANSPORT_LOG_VOLUME", tbCoapTransportLogVolume);
         env.put("TB_LWM2M_TRANSPORT_LOG_VOLUME", tbLwm2mTransportLogVolume);
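The CASSANDRA_DATA_VOLUME entry is what resolves the name: ${CASSANDRA_DATA_VOLUME} placeholder in the new docker/docker-compose.cassandra.volumes.yml; since that file is only added to the compose set in hybrid mode, the variable is exported under the same condition, keeping the env map aligned with the selected compose files.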
@@ -119,6 +130,11 @@ public class ThingsBoardDbInstaller extends ExternalResource {
         dockerCompose.withCommand("volume create " + postgresDataVolume);
         dockerCompose.invokeDocker();
 
+        if (IS_HYBRID_MODE) {
+            dockerCompose.withCommand("volume create " + cassandraDataVolume);
+            dockerCompose.invokeDocker();
+        }
+
         dockerCompose.withCommand("volume create " + tbLogVolume);
         dockerCompose.invokeDocker();
 
@@ -140,20 +156,23 @@ public class ThingsBoardDbInstaller extends ExternalResource {
         dockerCompose.withCommand("volume create " + tbVcExecutorLogVolume);
         dockerCompose.invokeDocker();
 
-        String redisService = "";
+        String additionalServices = "";
+        if (IS_HYBRID_MODE) {
+            additionalServices += " cassandra";
+        }
         if (IS_REDIS_CLUSTER) {
             for (int i = 0; i < 6; i++) {
-                redisService = redisService + " redis-node-" + i;
+                additionalServices = additionalServices + " redis-node-" + i;
                 dockerCompose.withCommand("volume create " + redisClusterDataVolume + '-' + i);
                 dockerCompose.invokeDocker();
             }
         } else {
-            redisService = "redis";
+            additionalServices += " redis";
             dockerCompose.withCommand("volume create " + redisDataVolume);
             dockerCompose.invokeDocker();
         }
 
-        dockerCompose.withCommand("up -d postgres " + redisService);
+        dockerCompose.withCommand("up -d postgres" + additionalServices);
         dockerCompose.invokeCompose();
 
         dockerCompose.withCommand("run --no-deps --rm -e INSTALL_TB=true -e LOAD_DEMO=true tb-core1");
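With the rename from redisService to additionalServices, every optional service now contributes its own leading space, which is why the literal changes from "up -d postgres " to "up -d postgres". The resulting command is "up -d postgres redis" by default, "up -d postgres cassandra redis" in hybrid mode, and "up -d postgres redis-node-0 ... redis-node-5" (with " cassandra" prepended when both flags are set) in redis-cluster mode.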