update the fix in the API
commit 94bb9f5be3 (parent a3b005e45a)
@@ -138,31 +138,31 @@ class DefaultTbContext implements TbContext {
     @Override
     public void enqueueForTellFailure(TbMsg tbMsg, String failureMessage) {
-        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, getTenantId(), tbMsg.getOriginator());
+        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, tbMsg.getQueueName(), getTenantId(), tbMsg.getOriginator());
         enqueueForTellNext(tpi, tbMsg, Collections.singleton(TbRelationTypes.FAILURE), failureMessage, null, null);
     }

     @Override
     public void enqueueForTellNext(TbMsg tbMsg, String relationType) {
-        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, getTenantId(), tbMsg.getOriginator());
+        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, tbMsg.getQueueName(), getTenantId(), tbMsg.getOriginator());
         enqueueForTellNext(tpi, tbMsg, Collections.singleton(relationType), null, null, null);
     }

     @Override
     public void enqueueForTellNext(TbMsg tbMsg, Set<String> relationTypes) {
-        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, getTenantId(), tbMsg.getOriginator());
+        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, tbMsg.getQueueName(), getTenantId(), tbMsg.getOriginator());
         enqueueForTellNext(tpi, tbMsg, relationTypes, null, null, null);
     }

     @Override
     public void enqueueForTellNext(TbMsg tbMsg, String relationType, Runnable onSuccess, Consumer<Throwable> onFailure) {
-        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, getTenantId(), tbMsg.getOriginator());
+        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, tbMsg.getQueueName(), getTenantId(), tbMsg.getOriginator());
         enqueueForTellNext(tpi, tbMsg, Collections.singleton(relationType), null, onSuccess, onFailure);
     }

     @Override
     public void enqueueForTellNext(TbMsg tbMsg, Set<String> relationTypes, Runnable onSuccess, Consumer<Throwable> onFailure) {
-        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, getTenantId(), tbMsg.getOriginator());
+        TopicPartitionInfo tpi = mainCtx.resolve(ServiceType.TB_RULE_ENGINE, tbMsg.getQueueName(), getTenantId(), tbMsg.getOriginator());
         enqueueForTellNext(tpi, tbMsg, relationTypes, null, onSuccess, onFailure);
     }
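For orientation, a minimal sketch (not part of this commit) of how a rule node calls the updated context API. QueueAwareForwarder is a hypothetical class invented for this example, and the imports assume the standard ThingsBoard package layout; the point is only that the caller no longer supplies a queue name, because each overload above now resolves the partition from tbMsg.getQueueName().

import java.util.function.Consumer;

import org.thingsboard.rule.engine.api.TbContext;
import org.thingsboard.rule.engine.api.TbRelationTypes;
import org.thingsboard.server.common.msg.TbMsg;

// Hypothetical caller of the queue-aware overloads shown above.
public class QueueAwareForwarder {

    private final TbContext ctx;

    public QueueAwareForwarder(TbContext ctx) {
        this.ctx = ctx;
    }

    public void forward(TbMsg msg) {
        // No queue argument here: the context resolves the target partition
        // from msg.getQueueName() internally.
        Runnable onSuccess = () -> { /* message enqueued on its own queue */ };
        Consumer<Throwable> onFailure = t -> ctx.tellFailure(msg, t);
        ctx.enqueueForTellNext(msg, TbRelationTypes.SUCCESS, onSuccess, onFailure);
    }
}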
@@ -113,7 +113,7 @@ public class TbSendRPCRequestNode implements TbNode {
         ctx.getRpcService().sendRpcRequestToDevice(request, ruleEngineDeviceRpcResponse -> {
             if (!ruleEngineDeviceRpcResponse.getError().isPresent()) {
                 TbMsg next = ctx.newMsg(msg.getQueueName(), msg.getType(), msg.getOriginator(), msg.getMetaData(), ruleEngineDeviceRpcResponse.getResponse().orElse("{}"));
-                ctx.enqueueForTellNext(next, next.getQueueName(), TbRelationTypes.SUCCESS, null, null);
+                ctx.enqueueForTellNext(next, TbRelationTypes.SUCCESS);
             } else {
                 TbMsg next = ctx.newMsg(msg.getQueueName(), msg.getType(), msg.getOriginator(), msg.getMetaData(), wrap("error", ruleEngineDeviceRpcResponse.getError().get().name()));
                 ctx.tellFailure(next, new RuntimeException(ruleEngineDeviceRpcResponse.getError().get().name()));
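A note on the simplification above (my reading of the change, not stated in the commit message): because enqueueForTellNext(TbMsg, String) is now queue-aware, TbSendRPCRequestNode no longer needs to pass next.getQueueName() explicitly; the two-argument call keeps the RPC response message on the same queue the original request arrived on.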