focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
public JmxCollector register() { return register(PrometheusRegistry.defaultRegistry); }
@Test public void testNameIsReplacedOnMatch() throws Exception { new JmxCollector( "\n---\nrules:\n- pattern: `^hadoop<service=DataNode, name=DataNodeActivity-ams-hdd001-50010><>replaceBlockOpMinTime:`\n name: foo" .replace('`', '"')) .register(prometheusRegistry); assertEquals(200, getSampleValue("foo", new String[] {}, new String[] {}), .001); }
@Override public GenericRow transform(GenericRow record) { try { GenericRow originalRow = _fieldsToUnnest.isEmpty() ? null : record.copy(_fieldsToUnnest); flattenMap(record, new ArrayList<>(record.getFieldToValueMap().keySet())); for (String field : _fieldsToUnnest) { unnestCollection(record, field); } Object unnestedRows = record.getValue(GenericRow.MULTIPLE_RECORDS_KEY); if (originalRow != null && unnestedRows instanceof Collection) { for (GenericRow unnestedRow : (Collection<GenericRow>) unnestedRows) { for (String field : _fieldsToUnnest) { unnestedRow.putValue(field, originalRow.getValue(field)); } } } renamePrefixes(record); } catch (Exception e) { if (!_continueOnError) { throw new RuntimeException("Caught exception while transforming complex types", e); } else { LOGGER.debug("Caught exception while transforming complex types for record: {}", record.toString(), e); record.putValue(GenericRow.INCOMPLETE_RECORD_KEY, true); } } return record; }
@Test public void testPrefixesToRename() { HashMap<String, String> prefixesToRename = new HashMap<>(); prefixesToRename.put("map1.", ""); prefixesToRename.put("map2", "test"); ComplexTypeTransformer transformer = new ComplexTypeTransformer(new ArrayList<>(), ".", DEFAULT_COLLECTION_TO_JSON_MODE, prefixesToRename, null); // test flatten root-level tuples GenericRow genericRow = new GenericRow(); genericRow.putValue("a", 1L); Map<String, Object> map1 = new HashMap<>(); genericRow.putValue("map1", map1); map1.put("b", "v"); Map<String, Object> innerMap1 = new HashMap<>(); innerMap1.put("aa", 2); innerMap1.put("bb", "u"); map1.put("im1", innerMap1); Map<String, Object> map2 = new HashMap<>(); map2.put("c", 3); genericRow.putValue("map2", map2); transformer.transform(genericRow); Assert.assertEquals(genericRow.getValue("a"), 1L); Assert.assertEquals(genericRow.getValue("b"), "v"); Assert.assertEquals(genericRow.getValue("im1.aa"), 2); Assert.assertEquals(genericRow.getValue("im1.bb"), "u"); Assert.assertEquals(genericRow.getValue("test.c"), 3); }
@Override public AppSettings load() { Properties p = loadPropertiesFile(homeDir); Set<String> keysOverridableFromEnv = stream(ProcessProperties.Property.values()).map(ProcessProperties.Property::getKey) .collect(Collectors.toSet()); keysOverridableFromEnv.addAll(p.stringPropertyNames()); // 1st pass to load static properties Props staticProps = reloadProperties(keysOverridableFromEnv, p); keysOverridableFromEnv.addAll(getDynamicPropertiesKeys(staticProps)); // 2nd pass to load dynamic properties like `ldap.*.url` or `ldap.*.baseDn` which keys depend on values of static // properties loaded in 1st step Props props = reloadProperties(keysOverridableFromEnv, p); new ProcessProperties(serviceLoaderWrapper).completeDefaults(props); stream(consumers).forEach(c -> c.accept(props)); return new AppSettingsImpl(props); }
@Test public void command_line_arguments_take_precedence_over_env_vars() throws Exception { when(system.getenv()).thenReturn(ImmutableMap.of("SONAR_CUSTOMPROP", "11")); when(system.getenv("SONAR_CUSTOMPROP")).thenReturn("11"); File homeDir = temp.newFolder(); File propsFile = new File(homeDir, "conf/sonar.properties"); FileUtils.write(propsFile, "sonar.customProp=10", UTF_8); AppSettingsLoaderImpl underTest = new AppSettingsLoaderImpl(system, new String[] {"-Dsonar.customProp=9"}, homeDir, serviceLoaderWrapper); AppSettings settings = underTest.load(); assertThat(settings.getProps().rawProperties()).contains(entry("sonar.customProp", "9")); }
public Offloaders getOrLoadOffloaders(String offloadersPath, String narExtractionDirectory) { return loadedOffloaders.computeIfAbsent(offloadersPath, (directory) -> { try { return OffloaderUtils.searchForOffloaders(directory, narExtractionDirectory); } catch (IOException e) { throw new RuntimeException(e); } }); }
@Test public void testLoadsOnlyOnce() throws Exception { Offloaders expectedOffloaders = new Offloaders(); try (MockedStatic<OffloaderUtils> offloaderUtils = Mockito.mockStatic(OffloaderUtils.class)) { offloaderUtils.when(() -> OffloaderUtils.searchForOffloaders(eq("./offloaders"), eq("/tmp"))) .thenReturn(expectedOffloaders); OffloadersCache cache = new OffloadersCache(); // Call a first time to load the offloader Offloaders offloaders1 = cache.getOrLoadOffloaders("./offloaders", "/tmp"); assertSame(offloaders1, expectedOffloaders, "The offloaders should be the mocked one."); // Call a second time to get the stored offloader Offloaders offloaders2 = cache.getOrLoadOffloaders("./offloaders", "/tmp"); assertSame(offloaders2, expectedOffloaders, "The offloaders should be the mocked one."); } }
@Override public <Request extends RequestCommand> boolean deserializeContent(Request request) throws DeserializationException { if (request instanceof RpcRequestCommand) { RpcRequestCommand requestCommand = (RpcRequestCommand) request; Object header = requestCommand.getRequestHeader(); if (!(header instanceof Map)) { throw new DeserializationException("Head of request is null or is not map"); } Map<String, String> headerMap = (Map<String, String>) header; String traceId = headerMap.get("rpc_trace_context.sofaTraceId"); String rpcId = headerMap.get("rpc_trace_context.sofaRpcId"); long deserializeStartTime = System.nanoTime(); try { byte[] content = requestCommand.getContent(); if (content == null || content.length == 0) { throw new DeserializationException("Content of request is null"); } String service = headerMap.get(RemotingConstants.HEAD_SERVICE); ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader(); ClassLoader serviceClassLoader = ReflectCache.getServiceClassLoader(service); try { Thread.currentThread().setContextClassLoader(serviceClassLoader); Serializer rpcSerializer = com.alipay.sofa.rpc.codec.SerializerFactory .getSerializer(requestCommand.getSerializer()); Object sofaRequest = ClassUtils.forName(requestCommand.getRequestClass()).newInstance(); rpcSerializer.decode(new ByteArrayWrapperByteBuf(requestCommand.getContent()), sofaRequest, headerMap); //for service mesh or other scene, we need to add more info from header setRequestPropertiesWithHeaderInfo(headerMap, sofaRequest); parseRequestHeader(headerMap, sofaRequest); requestCommand.setRequestObject(sofaRequest); } finally { Thread.currentThread().setContextClassLoader(oldClassLoader); } return true; } catch (Exception ex) { LOGGER.error("traceId={}, rpcId={}, Request deserializeContent exception, msg={}", traceId, rpcId, ex.getMessage(), ex); throw new DeserializationException(ex.getMessage() + ", traceId=" + traceId + ", rpcId=" + rpcId, ex); } finally { // R6:Record request deserialization time recordDeserializeRequest(requestCommand, deserializeStartTime); } } return false; }
@Test public void deserializeRequestContent() { String traceId = "traceId"; String rpcId = "rpcId"; Map<String, String> headerMap = new HashMap<>(); headerMap.put("rpc_trace_context.sofaTraceId", traceId); headerMap.put("rpc_trace_context.sofaRpcId", rpcId); RpcRequestCommand command = new RpcRequestCommand(); command.setRequestHeader(headerMap); SofaRpcSerialization sofaRpcSerialization = new SofaRpcSerialization(); boolean exp = false; try { sofaRpcSerialization.deserializeContent(command); } catch (DeserializationException e) { exp = true; Assert.assertEquals("Content of request is null, traceId=" + traceId + ", rpcId=" + rpcId, e.getMessage()); } Assert.assertTrue(exp); }
public static boolean isX64Arch() { return isX64Arch(OS_ARCH); }
@Test @EnabledOnOs(architectures = { "x64", "x86_64", "amd64" }) void isX64ArchSystemTest() { assertTrue(SystemUtil.isX64Arch()); }
protected void declareRule(final KiePMMLDroolsRule rule) { logger.trace("declareRule {}", rule); final RuleDescrBuilder ruleBuilder = builder.newRule().name(rule.getName()); if (rule.getAgendaGroup() != null) { declareAgendaGroup(ruleBuilder, rule.getAgendaGroup()); } if (rule.getActivationGroup() != null) { declareActivationGroup(ruleBuilder, rule.getActivationGroup()); } KiePMMLDescrLhsFactory.factory(ruleBuilder.lhs()).declareLhs(rule); KiePMMLDescrRhsFactory.factory(ruleBuilder).declareRhs(rule); }
@Test void declareRule() { String name = "NAME"; String statusToSet = "STATUS_TO_SET"; String patternType = "TEMPERATURE"; String agendaGroup = "agendaGroup"; String activationGroup = "activationGroup"; List<KiePMMLFieldOperatorValue> orConstraints = Arrays.asList(new KiePMMLFieldOperatorValue(patternType, BOOLEAN_OPERATOR.OR, Collections.singletonList(new KiePMMLOperatorValue(OPERATOR.LESS_THAN, 35)), null), new KiePMMLFieldOperatorValue(patternType, BOOLEAN_OPERATOR.AND, Collections.singletonList(new KiePMMLOperatorValue(OPERATOR.GREATER_THAN, 85)), null)); KiePMMLDroolsRule rule = KiePMMLDroolsRule.builder(name, statusToSet, Collections.emptyList()) .withAgendaGroup(agendaGroup) .withActivationGroup(activationGroup) .withOrConstraints(orConstraints) .build(); KiePMMLDescrRulesFactory.factory(builder).declareRule(rule); assertThat(builder.getDescr().getRules()).isNotNull(); assertThat(builder.getDescr().getRules()).hasSize(1); final RuleDescr retrieved = builder.getDescr().getRules().get(0); assertThat(retrieved.getName()).isEqualTo(name); assertThat(retrieved.getAttributes()).hasSize(2); assertThat(retrieved.getAttributes()).containsKey("agenda-group"); assertThat(retrieved.getAttributes().get("agenda-group").getValue()).isEqualTo(agendaGroup); assertThat(retrieved.getAttributes()).containsKey("activation-group"); assertThat(retrieved.getAttributes().get("activation-group").getValue()).isEqualTo(activationGroup); }
@Override @Cacheable(cacheNames = RedisKeyConstants.NOTIFY_TEMPLATE, key = "#code", unless = "#result == null") public NotifyTemplateDO getNotifyTemplateByCodeFromCache(String code) { return notifyTemplateMapper.selectByCode(code); }
@Test public void testGetNotifyTemplateByCodeFromCache() { // mock data NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class); notifyTemplateMapper.insert(dbNotifyTemplate); // prepare parameters String code = dbNotifyTemplate.getCode(); // invoke NotifyTemplateDO notifyTemplate = notifyTemplateService.getNotifyTemplateByCodeFromCache(code); // assert assertPojoEquals(dbNotifyTemplate, notifyTemplate); }
@Override public Interface removeInterface(VplsData vplsData, Interface iface) { requireNonNull(vplsData); requireNonNull(iface); VplsData newData = VplsData.of(vplsData); newData.removeInterface(iface); updateVplsStatus(newData, VplsData.VplsState.UPDATING); return iface; }
@Test public void testRemoveInterface() { VplsData vplsData = vplsManager.createVpls(VPLS1, NONE); vplsManager.addInterface(vplsData, V100H1); vplsManager.addInterface(vplsData, V100H2); vplsManager.removeInterface(vplsData, V100H1); vplsData = vplsStore.getVpls(VPLS1); assertNotNull(vplsData); assertEquals(vplsData.state(), UPDATING); assertEquals(1, vplsData.interfaces().size()); assertTrue(vplsData.interfaces().contains(V100H2)); }
public static Properties loadProperties(Set<ClassLoader> classLoaders, String fileName) { return loadProperties(classLoaders, fileName, false, false); }
@Test void testLoadPropertiesOneFile() throws Exception { Properties p = ConfigUtils.loadProperties(Collections.emptySet(), "properties.load", false); Properties expected = new Properties(); expected.put("a", "12"); expected.put("b", "34"); expected.put("c", "56"); assertEquals(expected, p); }
@Override public String toString() { return HeadersUtils.toString(getClass(), iterator(), size()); }
@Test public void testToString() { TestDefaultHeaders headers = newInstance(); headers.add(of("name1"), of("value1")); headers.add(of("name1"), of("value2")); headers.add(of("name2"), of("value3")); assertEquals("TestDefaultHeaders[name1: value1, name1: value2, name2: value3]", headers.toString()); headers = newInstance(); headers.add(of("name1"), of("value1")); headers.add(of("name2"), of("value2")); headers.add(of("name3"), of("value3")); assertEquals("TestDefaultHeaders[name1: value1, name2: value2, name3: value3]", headers.toString()); headers = newInstance(); headers.add(of("name1"), of("value1")); assertEquals("TestDefaultHeaders[name1: value1]", headers.toString()); headers = newInstance(); assertEquals("TestDefaultHeaders[]", headers.toString()); }
@Override public Mono<ClientResponse> filter(ClientRequest originRequest, ExchangeFunction next) { EnhancedPluginContext enhancedPluginContext = new EnhancedPluginContext(); EnhancedRequestContext enhancedRequestContext = EnhancedRequestContext.builder() .httpHeaders(originRequest.headers()) .httpMethod(originRequest.method()) .url(originRequest.url()) .build(); enhancedPluginContext.setRequest(enhancedRequestContext); enhancedPluginContext.setOriginRequest(originRequest); enhancedPluginContext.setLocalServiceInstance(pluginRunner.getLocalServiceInstance()); enhancedPluginContext.setTargetServiceInstance((ServiceInstance) MetadataContextHolder.get() .getLoadbalancerMetadata().get(LOAD_BALANCER_SERVICE_INSTANCE), originRequest.url()); // Run pre enhanced plugins. pluginRunner.run(EnhancedPluginType.Client.PRE, enhancedPluginContext); // request may be changed by plugin ClientRequest request = (ClientRequest) enhancedPluginContext.getOriginRequest(); long startTime = System.currentTimeMillis(); return next.exchange(request) .doOnSuccess(response -> { enhancedPluginContext.setDelay(System.currentTimeMillis() - startTime); EnhancedResponseContext enhancedResponseContext = EnhancedResponseContext.builder() .httpStatus(response.statusCode().value()) .httpHeaders(response.headers().asHttpHeaders()) .build(); enhancedPluginContext.setResponse(enhancedResponseContext); // Run post enhanced plugins. pluginRunner.run(EnhancedPluginType.Client.POST, enhancedPluginContext); }) .doOnError(t -> { enhancedPluginContext.setDelay(System.currentTimeMillis() - startTime); enhancedPluginContext.setThrowable(t); // Run exception enhanced plugins. pluginRunner.run(EnhancedPluginType.Client.EXCEPTION, enhancedPluginContext); }) .doFinally(v -> { // Run finally enhanced plugins. pluginRunner.run(EnhancedPluginType.Client.FINALLY, enhancedPluginContext); }); }
@Test public void testRun() throws URISyntaxException { doReturn(new URI("http://0.0.0.0/")).when(clientRequest).url(); doReturn(new HttpHeaders()).when(clientRequest).headers(); doReturn(HttpMethod.GET).when(clientRequest).method(); ClientResponse.Headers headers = mock(ClientResponse.Headers.class); doReturn(headers).when(clientResponse).headers(); doReturn(HttpStatusCode.valueOf(200)).when(clientResponse).statusCode(); doReturn(Mono.just(clientResponse)).when(exchangeFunction).exchange(any()); EnhancedWebClientExchangeFilterFunction reporter = new EnhancedWebClientExchangeFilterFunction(new DefaultEnhancedPluginRunner(new ArrayList<>(), registration, null)); ClientResponse clientResponse1 = reporter.filter(clientRequest, exchangeFunction).block(); assertThat(clientResponse1).isEqualTo(clientResponse); ClientResponse clientResponse2 = reporter.filter(clientRequest, exchangeFunction).block(); assertThat(clientResponse2).isEqualTo(clientResponse); doReturn(Mono.error(new RuntimeException())).when(exchangeFunction).exchange(any()); assertThatThrownBy(() -> reporter.filter(clientRequest, exchangeFunction).block()).isInstanceOf(RuntimeException.class); }
public <T> T getStore(final StoreQueryParameters<T> storeQueryParameters) { final String storeName = storeQueryParameters.storeName(); final QueryableStoreType<T> queryableStoreType = storeQueryParameters.queryableStoreType(); final List<T> globalStore = globalStoreProvider.stores(storeName, queryableStoreType); if (!globalStore.isEmpty()) { return queryableStoreType.create(globalStoreProvider, storeName); } return queryableStoreType.create( new WrappingStoreProvider(storeProviders.values(), storeQueryParameters), storeName ); }
@Test public void shouldThrowExceptionWhenKVStoreWithPartitionDoesntExists() { final int partition = numStateStorePartitions + 1; final InvalidStateStoreException thrown = assertThrows(InvalidStateStoreException.class, () -> storeProvider.getStore( StoreQueryParameters .fromNameAndType(keyValueStore, QueryableStoreTypes.keyValueStore()) .withPartition(partition)).get("1") ); assertThat(thrown.getMessage(), equalTo(String.format("The specified partition %d for store %s does not exist.", partition, keyValueStore))); }
@SneakyThrows(ReflectiveOperationException.class) public static <T extends YamlConfiguration> T unmarshal(final File yamlFile, final Class<T> classType) throws IOException { try (BufferedReader inputStreamReader = Files.newBufferedReader(Paths.get(yamlFile.toURI()))) { T result = new Yaml(new ShardingSphereYamlConstructor(classType)).loadAs(inputStreamReader, classType); return null == result ? classType.getConstructor().newInstance() : result; } }
@Test void assertUnmarshalProperties() { Properties actual = YamlEngine.unmarshal("password: pwd", Properties.class); assertThat(actual.getProperty("password"), is("pwd")); }
@Override public Table getTable(String dbName, String tblName) { Table table; try { table = hmsOps.getTable(dbName, tblName); } catch (StarRocksConnectorException e) { LOG.error("Failed to get hive table [{}.{}.{}]", catalogName, dbName, tblName, e); throw e; } catch (Exception e) { LOG.error("Failed to get hive table [{}.{}.{}]", catalogName, dbName, tblName, e); return null; } return table; }
@Test public void testGetTable() { com.starrocks.catalog.Table table = hiveMetadata.getTable("db1", "tbl1"); HiveTable hiveTable = (HiveTable) table; Assert.assertEquals("db1", hiveTable.getDbName()); Assert.assertEquals("tbl1", hiveTable.getTableName()); Assert.assertEquals(Lists.newArrayList("col1"), hiveTable.getPartitionColumnNames()); Assert.assertEquals(Lists.newArrayList("col2"), hiveTable.getDataColumnNames()); Assert.assertEquals("hdfs://127.0.0.1:10000/hive", hiveTable.getTableLocation()); Assert.assertEquals(ScalarType.INT, hiveTable.getPartitionColumns().get(0).getType()); Assert.assertEquals(ScalarType.INT, hiveTable.getBaseSchema().get(0).getType()); Assert.assertEquals("hive_catalog", hiveTable.getCatalogName()); }
@SuppressWarnings("unchecked") public static List<Object> asList(final Object key) { final Optional<Windowed<Object>> windowed = key instanceof Windowed ? Optional.of((Windowed<Object>) key) : Optional.empty(); final Object naturalKey = windowed .map(Windowed::key) .orElse(key); if (naturalKey != null && !(naturalKey instanceof GenericKey)) { throw new IllegalArgumentException("Non generic key: " + key); } final Optional<GenericKey> genericKey = Optional.ofNullable((GenericKey) naturalKey); final List<Object> data = new ArrayList<>( genericKey.map(GenericKey::size).orElse(0) + (windowed.isPresent() ? 2 : 0) ); genericKey.ifPresent(k -> data.addAll(k.values())); windowed .map(Windowed::window) .ifPresent(wnd -> { data.add(wnd.start()); data.add(wnd.end()); }); return data; }
@Test public void shouldConvertNonWindowedKeyToList() { // Given: final GenericKey key = GenericKey.genericKey(10); // When: final List<?> result = KeyUtil.asList(key); // Then: assertThat(result, is(ImmutableList.of(10))); }
public static KafkaLogCollectClient getKafkaLogCollectClient() { return KAFKA_LOG_COLLECT_CLIENT; }
@Test public void testGetKafkaLogCollectClient() { Assertions.assertEquals(LoggingKafkaPluginDataHandler.getKafkaLogCollectClient().getClass(), KafkaLogCollectClient.class); }
@Override public SubscriptionType getSubscriptionType() { return subscriptionType; }
@Test public void testGetSubscriptionType() { SinkContext ctx = context; // make sure SinkContext can get SubscriptionType. Assert.assertEquals(ctx.getSubscriptionType(), SubscriptionType.Shared); }
public String getMethod(){ return method; }
@Test public void testRepeatedArguments() throws Exception { String url = "http://localhost/matrix.html"; // A HTTP GET request String contentEncoding = "UTF-8"; String testGetRequest = "GET " + url + "?update=yes&d=1&d=2&d=&d=&d=&d=&d=&d=1&d=2&d=1&d=&d= " + "HTTP/1.0\r\n\r\n"; HTTPSamplerBase s = getSamplerForRequest(url, testGetRequest, contentEncoding); assertEquals(HTTPConstants.GET, s.getMethod()); assertEquals(contentEncoding, s.getContentEncoding()); // Check arguments Arguments arguments = s.getArguments(); assertEquals(13, arguments.getArgumentCount()); checkArgument((HTTPArgument)arguments.getArgument(0), "update", "yes", "yes", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(1), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(2), "d", "2", "2", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(3), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(4), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(5), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(6), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(7), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(8), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(9), "d", "2", "2", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(10), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(11), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(12), "d", "", "", contentEncoding, false); // A HTTP POST request contentEncoding = "UTF-8"; String postBody = "update=yes&d=1&d=2&d=&d=&d=&d=&d=&d=1&d=2&d=1&d=&d="; String testPostRequest = "POST " + url + " HTTP/1.0\n" + "Content-type: " + HTTPConstants.APPLICATION_X_WWW_FORM_URLENCODED + "\r\n" + "Content-length: " + getBodyLength(postBody, contentEncoding) + "\r\n" + "\r\n" + postBody; s = getSamplerForRequest(url, testPostRequest, contentEncoding); assertEquals(HTTPConstants.POST, s.getMethod()); assertFalse(s.getDoMultipart()); assertEquals(contentEncoding, s.getContentEncoding()); // Check arguments arguments = s.getArguments(); assertEquals(13, arguments.getArgumentCount()); checkArgument((HTTPArgument)arguments.getArgument(0), "update", "yes", "yes", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(1), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(2), "d", "2", "2", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(3), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(4), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(5), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(6), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(7), "d", "", "", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(8), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(9), "d", "2", "2", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(10), "d", "1", "1", contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(11), "d", "", "", 
contentEncoding, false); checkArgument((HTTPArgument)arguments.getArgument(12), "d", "", "", contentEncoding, false); // A HTTP POST request, with content-type text/plain contentEncoding = "UTF-8"; postBody = "update=yes&d=1&d=2&d=&d=&d=&d=&d=&d=1&d=2&d=1&d=\uc385&d="; testPostRequest = "POST " + url + " HTTP/1.1\r\n" + "Content-type: text/plain\r\n" + "Content-length: " + getBodyLength(postBody, contentEncoding) + "\r\n" + "\r\n" + postBody; s = getSamplerForRequest(url, testPostRequest, contentEncoding); assertEquals(HTTPConstants.POST, s.getMethod()); assertFalse(s.getDoMultipart()); assertEquals(contentEncoding, s.getContentEncoding()); // Check arguments // We should have one argument, with the value equal to the post body arguments = s.getArguments(); assertEquals(1, arguments.getArgumentCount()); checkArgument((HTTPArgument)arguments.getArgument(0), "", postBody, postBody, contentEncoding, false); // A HTTP POST request, with content-type text/plain; charset=UTF-8 // The encoding should be picked up from the header we send with the request contentEncoding = "UTF-8"; postBody = "update=yes&d=1&d=2&d=&d=&d=&d=&d=&d=1&d=2&d=1&d=\uc385&d="; testPostRequest = "POST " + url + " HTTP/1.1\r\n" + "Content-type: text/plain; charset=" + contentEncoding + "\r\n" + "Content-length: " + getBodyLength(postBody, contentEncoding) + "\r\n" + "\r\n" + postBody; // Use null for url to simulate that HttpRequestHdr do not // know the encoding for the page. Specify contentEncoding, so the // request is "sent" using that encoding s = getSamplerForRequest(null, testPostRequest, contentEncoding); assertEquals(HTTPConstants.POST, s.getMethod()); assertFalse(s.getDoMultipart()); assertEquals(contentEncoding, s.getContentEncoding()); // Check arguments // We should have one argument, with the value equal to the post body arguments = s.getArguments(); assertEquals(1, arguments.getArgumentCount()); checkArgument((HTTPArgument)arguments.getArgument(0), "", postBody, postBody, contentEncoding, false); // A HTTP POST request, with content-type text/plain; charset=UTF-8 // The encoding should be picked up from the header we send with the request contentEncoding = "UTF-8"; url = "http://vmdal-hqqa9/retalixhq/GG_Implementation/ScreenEntity/ScreenEntityHTTP.aspx?Action=Save&ET=Vendor&TT=Single&Sid=1347280336092"; postBody = "<Action UIStatus=\"2\"><Vendor Id=\"9292\" HOST_ID=\"0\" VENDOR=\"9292\" TERMS_TYPE=\"No Terms\" TERMS=\"0 %\"" + " AUTO_PRICE=\"Use System Default\" VM_VENDOR_TYPE=\"DSD Vendor\" ITEM_FORMAT=\"PLU\" COST_ENTRY_SORT=\"UPC/EAN\"" + " VM_REPORT_SORT=\"UPC/EAN\" VM_ORDER_SORT=\"UPC/EAN\" VM_RECEIVING_SORT=\"UPC/EAN\" VM_MAX_BACK_ORDERS=\"99\"" + " MAX_OPEN_DAYS=\"99\" PAY_BASED_ON=\"System Cost\" ORDER_COST_DATE=\"Use System Rule\" VM_CONSIDER_FREE=\"False\"" + " VM_SHOW_DETAIL=\"False\" VM_UPDATE_COST=\"No\" RD_USE_VENDOR_CC=\"False\" BLIND_RECEIVING=\"Default\"" + " EXCLUDE_RECEIVED_COST=\"False\" PRINT_ITEM_ADJ=\"False\" PRINT_OVERALL_ADJ=\"False\" PRINT_TAX_DETAIL=\"False\"" + " BLOCK_PRICE_VIEW=\"False\" DELIVERY_STATUS=\"No Delivery\" AUTO_RECEIVE=\"False\" TARGET_GM_FLAG=\"%\"" + " MINIMUM_GM_FLAG=\"%\" MARGIN_TYPE=\"Gross Margin\" HOLD_REGULAR=\"Default\" HOLD_SPECIALS=\"Default\"" + " TRUSTING_VENDOR=\"False\" AUTO_ACCEPT=\"All\" EARLY_RCPT_AFFECTS=\"All Costs\" SBT_ELIGIBLE=\"Not eligible\"" + " SBT_REPORTING_DAY=\"Monday\" AUTO_BALANCE_FLAG=\"$\" DAX_MANAGED=\"False\" CHANGE_ID=\"QA\" CHANGE_SOURCE=\"Manual Change\"" + " ORIGINAL_SOURCE=\"Manual Change\" RECORD_STATUS=\"Add\" 
RECORD_STATUS_DATE=\"9/7/2012 8:34:58 AM\" VENDOR_NAME=\"test\"" + " UIStatus=\"2\"/></Action>"; testPostRequest = "POST " + url + " HTTP/1.1\r\n" + "x-requested-with: XMLHttpRequest" + "\r\n" + "Accept-Language: en-us" + "\r\n" + "Referer: http://vmdal-hqqa9/retalixhq/GG_Implementation/ScreenEntity/ScreenEntityPage.aspx?ET=Vendor&TT=Single&" + "WM=2&UID=9292&Sid=1347280331908&UITH=Blue&MUID=window_0" + "\r\n" + "Accept: */*" + "\r\n" + "Content-Type: application/x-www-form-urlencoded" + "\r\n" + "Accept-Encoding: gzip, deflate" + "\r\n" + "User-Agent: Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729;" + " .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; Tablet PC 2.0)" + "\r\n" + "Host: vmdal-hqqa9" + "\r\n" + "Content-Length: "+ getBodyLength(postBody, contentEncoding) + "\r\n" + "Proxy-Connection: Keep-Alive" + "\r\n" + "Pragma: no-cache" + "\r\n" + "Cookie: RHQ=sid=5aaeb66c-e174-4f4c-9928-83cffcc62150" + "\r\n" + "\r\n" + postBody; // Use null for url to simulate that HttpRequestHdr do not // know the encoding for the page. Specify contentEncoding, so the // request is "sent" using that encoding s = getSamplerForRequest(null, testPostRequest, contentEncoding); assertEquals(HTTPConstants.POST, s.getMethod()); assertFalse(s.getDoMultipart()); // TODO Should this be OK ? //assertEquals(contentEncoding, s.getContentEncoding()); // Check arguments // We should have one argument, with the value equal to the post body arguments = s.getArguments(); assertEquals(1, arguments.getArgumentCount()); checkArgument((HTTPArgument)arguments.getArgument(0), "", postBody, postBody, contentEncoding, false); }
public static void unzip(Path archive, Path destination) throws IOException { unzip(archive, destination, false); }
@Test public void testUnzip() throws URISyntaxException, IOException { verifyUnzip(tempFolder.getRoot().toPath()); }
@Override public String getType() { return type; }
@Test public void testType() { Assert.assertEquals(getExpectedType(), handler.getType()); }
static void checkValidTableId(String idToCheck) { if (idToCheck.length() < MIN_TABLE_ID_LENGTH) { throw new IllegalArgumentException("Table ID " + idToCheck + " cannot be empty."); } if (idToCheck.length() > MAX_TABLE_ID_LENGTH) { throw new IllegalArgumentException( "Table ID " + idToCheck + " cannot be longer than " + MAX_TABLE_ID_LENGTH + " characters."); } if (ILLEGAL_TABLE_CHARS.matcher(idToCheck).find()) { throw new IllegalArgumentException( "Table ID " + idToCheck + " is not a valid ID. Only letters, numbers, hyphens, underscores and exclamation points are allowed."); } }
@Test public void testCheckValidTableIdWhenIdIsTooLong() { assertThrows( IllegalArgumentException.class, () -> checkValidTableId("really-really-really-really-long-table-id")); }
public void changeFieldType(final CustomFieldMapping customMapping, final Set<String> indexSetsIds, final boolean rotateImmediately) { checkFieldTypeCanBeChanged(customMapping.fieldName()); checkType(customMapping); checkAllIndicesSupportFieldTypeChange(customMapping.fieldName(), indexSetsIds); for (String indexSetId : indexSetsIds) { try { indexSetService.get(indexSetId).ifPresent(indexSetConfig -> { var updatedIndexSetConfig = storeMapping(customMapping, indexSetConfig); if (rotateImmediately) { updatedIndexSetConfig.ifPresent(this::cycleIndexSet); } }); } catch (Exception ex) { LOG.error("Failed to update field type in index set : " + indexSetId, ex); throw ex; } } }
@Test void testSavesIndexSetWithNewMappingAndPreviousMappings() { doReturn(Optional.of(existingIndexSet)).when(indexSetService).get("existing_index_set"); toTest.changeFieldType(newCustomMapping, Set.of("existing_index_set"), false); verify(mongoIndexSetService).save( existingIndexSet.toBuilder() .customFieldMappings(new CustomFieldMappings(Set.of(existingCustomFieldMapping, newCustomMapping))) .build()); verifyNoInteractions(existingMongoIndexSet); }
@Override public MapSettings setProperty(String key, String value) { return (MapSettings) super.setProperty(key, value); }
@Test public void test_get_bad_double() { Settings settings = new MapSettings(); settings.setProperty("foo", "bar"); assertThatThrownBy(() -> settings.getDouble("foo")) .isInstanceOf(IllegalStateException.class) .hasMessage("The property 'foo' is not a double value"); }
protected TaskConfig buildTaskConfig(TaskConfig config) { TaskConfig taskExecConfig = new TaskConfig(); for (Property property : config.list()) { taskExecConfig.add(getExecProperty(config, property)); } return taskExecConfig; }
@Test public void shouldReturnDefaultValueInExecConfigWhenConfigValueIsEmptyString() { TaskConfig defaultTaskConfig = new TaskConfig(); String propertyName = "URL"; String defaultValue = "ABC.TXT"; Map<String, Map<String, String>> configMap = new HashMap<>(); HashMap<String, String> configValue = new HashMap<>(); configValue.put("value", ""); configMap.put(propertyName, configValue); PluggableTask task = mock(PluggableTask.class); when(task.getPluginConfiguration()).thenReturn(new PluginConfiguration()); when(task.configAsMap()).thenReturn(configMap); PluggableTaskBuilder taskBuilder = new PluggableTaskBuilder(runIfConfigs, cancelBuilder, task, TEST_PLUGIN_ID, "test-directory"); defaultTaskConfig.addProperty(propertyName).withDefault(defaultValue); TaskConfig config = taskBuilder.buildTaskConfig(defaultTaskConfig); assertThat(config.getValue(propertyName)).isEqualTo(defaultValue); }
public static UDoWhileLoop create(UStatement body, UExpression condition) { return new AutoValue_UDoWhileLoop((USimpleStatement) body, condition); }
@Test public void serialization() { SerializableTester.reserializeAndAssert( UDoWhileLoop.create( UBlock.create( UExpressionStatement.create( UAssign.create( ULocalVarIdent.create("old"), UMethodInvocation.create( UMemberSelect.create( UFreeIdent.create("str"), "indexOf", UMethodType.create( UPrimitiveType.INT, UPrimitiveType.INT, UPrimitiveType.INT)), ULiteral.charLit(' '), UBinary.create( Kind.PLUS, ULocalVarIdent.create("old"), ULiteral.intLit(1)))))), UParens.create( UBinary.create( Kind.NOT_EQUAL_TO, ULocalVarIdent.create("old"), ULiteral.intLit(-1))))); }
public static PTransformMatcher classEqualTo(Class<? extends PTransform> clazz) { return new EqualClassPTransformMatcher(clazz); }
@Test public void classEqualToMatchesSameClass() { PTransformMatcher matcher = PTransformMatchers.classEqualTo(ParDo.SingleOutput.class); AppliedPTransform<?, ?, ?> application = getAppliedTransform( ParDo.of( new DoFn<KV<String, Integer>, Integer>() { @ProcessElement public void doStuff(ProcessContext ctxt) {} })); assertThat(matcher.matches(application), is(true)); }
@Override public AppResponse process(Flow flow, AppRequest params) { var result = digidClient.getWidstatus(appSession.getWidRequestId()); switch(result.get("status").toString()){ case "NO_DOCUMENTS": appSession.setRdaSessionStatus("NO_DOCUMENTS"); appSession.setBrpIdentifier(result.get("brp_identifier").toString()); appSessionService.save(appSession); return new StatusResponse("NO_DOCUMENTS"); case "PENDING": setValid(false); // Do not progress to next state return new StatusResponse("PENDING"); case "NOK": return new NokResponse(); } digidClient.remoteLog("867", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); appSession.setRdaSessionStatus("DOCUMENTS_RECEIVED"); Map<String, String> rdaSession = rdaClient.startSession(returnUrl + "/iapi/rda/confirm", appSession.getId(), params.getIpAddress(), result.get("travel_documents"), result.get("driving_licences")); if (rdaSession.isEmpty()) { digidClient.remoteLog("873", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); return new NokResponse(); } appSession.setConfirmSecret(rdaSession.get("confirmSecret")); appSession.setUrl(rdaSession.get("url")); appSession.setRdaSessionId(rdaSession.get("sessionId")); appSession.setRdaSessionTimeoutInSeconds(rdaSession.get("expiration")); appSession.setRdaSessionStatus("SCANNING"); digidClient.remoteLog("868", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); return new RdaResponse(appSession.getUrl(), appSession.getRdaSessionId()); }
@Test void processWidstatusPending(){ when(digidClientMock.getWidstatus(mockedAppSession.getWidRequestId())).thenReturn(invalidDigidClientResponsePending); AppResponse appResponse = rdaPolling.process(mockedFlow, mockedAbstractAppRequest); assertEquals("PENDING", ((StatusResponse)appResponse).getStatus()); }
@Override public ValidationResult validate(Object value) { ValidationResult result = super.validate(value); if (result instanceof ValidationResult.ValidationPassed) { final String sValue = (String) value; if (sValue != null && sValue.length() > maxLength) { result = new ValidationResult.ValidationFailed("Value is longer than " + maxLength + " characters!"); } } return result; }
@Test public void testValidateNullValue() { assertThat(new LimitedOptionalStringValidator(1).validate(null)) .isInstanceOf(ValidationResult.ValidationPassed.class); }
static boolean isUserDefinedRealTask(Task task) { return isUserDefinedTask(task) && isRealTask(task); }
@Test public void testIsUserDefinedRealTask() { when(task.getTaskType()).thenReturn(Constants.MAESTRO_TASK_NAME); when(task.getSeq()).thenReturn(1); Assert.assertTrue(TaskHelper.isUserDefinedRealTask(task)); when(task.getTaskType()).thenReturn("TEST_TASK"); Assert.assertFalse(TaskHelper.isUserDefinedRealTask(task)); when(task.getTaskType()).thenReturn(Constants.MAESTRO_TASK_NAME); when(task.getSeq()).thenReturn(-1); Assert.assertFalse(TaskHelper.isUserDefinedRealTask(task)); }
public void decode(ByteBuf buffer) { boolean last; int statusCode; while (true) { switch(state) { case READ_COMMON_HEADER: if (buffer.readableBytes() < SPDY_HEADER_SIZE) { return; } int frameOffset = buffer.readerIndex(); int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET; int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET; buffer.skipBytes(SPDY_HEADER_SIZE); boolean control = (buffer.getByte(frameOffset) & 0x80) != 0; int version; int type; if (control) { // Decode control frame common header version = getUnsignedShort(buffer, frameOffset) & 0x7FFF; type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET); streamId = 0; // Default to session Stream-ID } else { // Decode data frame common header version = spdyVersion; // Default to expected version type = SPDY_DATA_FRAME; streamId = getUnsignedInt(buffer, frameOffset); } flags = buffer.getByte(flagsOffset); length = getUnsignedMedium(buffer, lengthOffset); // Check version first then validity if (version != spdyVersion) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid SPDY Version"); } else if (!isValidFrameHeader(streamId, type, flags, length)) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid Frame Error"); } else { state = getNextState(type, length); } break; case READ_DATA_FRAME: if (length == 0) { state = State.READ_COMMON_HEADER; delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0)); break; } // Generate data frames that do not exceed maxChunkSize int dataLength = Math.min(maxChunkSize, length); // Wait until entire frame is readable if (buffer.readableBytes() < dataLength) { return; } ByteBuf data = buffer.alloc().buffer(dataLength); data.writeBytes(buffer, dataLength); length -= dataLength; if (length == 0) { state = State.READ_COMMON_HEADER; } last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN); delegate.readDataFrame(streamId, last, data); break; case READ_SYN_STREAM_FRAME: if (buffer.readableBytes() < 10) { return; } int offset = buffer.readerIndex(); streamId = getUnsignedInt(buffer, offset); int associatedToStreamId = getUnsignedInt(buffer, offset + 4); byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07); last = hasFlag(flags, SPDY_FLAG_FIN); boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL); buffer.skipBytes(10); length -= 10; if (streamId == 0) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid SYN_STREAM Frame"); } else { state = State.READ_HEADER_BLOCK; delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional); } break; case READ_SYN_REPLY_FRAME: if (buffer.readableBytes() < 4) { return; } streamId = getUnsignedInt(buffer, buffer.readerIndex()); last = hasFlag(flags, SPDY_FLAG_FIN); buffer.skipBytes(4); length -= 4; if (streamId == 0) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid SYN_REPLY Frame"); } else { state = State.READ_HEADER_BLOCK; delegate.readSynReplyFrame(streamId, last); } break; case READ_RST_STREAM_FRAME: if (buffer.readableBytes() < 8) { return; } streamId = getUnsignedInt(buffer, buffer.readerIndex()); statusCode = getSignedInt(buffer, buffer.readerIndex() + 4); buffer.skipBytes(8); if (streamId == 0 || statusCode == 0) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid RST_STREAM Frame"); } else { state = State.READ_COMMON_HEADER; delegate.readRstStreamFrame(streamId, statusCode); } break; case READ_SETTINGS_FRAME: if (buffer.readableBytes() < 4) { return; } boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR); 
numSettings = getUnsignedInt(buffer, buffer.readerIndex()); buffer.skipBytes(4); length -= 4; // Validate frame length against number of entries. Each ID/Value entry is 8 bytes. if ((length & 0x07) != 0 || length >> 3 != numSettings) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid SETTINGS Frame"); } else { state = State.READ_SETTING; delegate.readSettingsFrame(clear); } break; case READ_SETTING: if (numSettings == 0) { state = State.READ_COMMON_HEADER; delegate.readSettingsEnd(); break; } if (buffer.readableBytes() < 8) { return; } byte settingsFlags = buffer.getByte(buffer.readerIndex()); int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1); int value = getSignedInt(buffer, buffer.readerIndex() + 4); boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE); boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED); buffer.skipBytes(8); --numSettings; delegate.readSetting(id, value, persistValue, persisted); break; case READ_PING_FRAME: if (buffer.readableBytes() < 4) { return; } int pingId = getSignedInt(buffer, buffer.readerIndex()); buffer.skipBytes(4); state = State.READ_COMMON_HEADER; delegate.readPingFrame(pingId); break; case READ_GOAWAY_FRAME: if (buffer.readableBytes() < 8) { return; } int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex()); statusCode = getSignedInt(buffer, buffer.readerIndex() + 4); buffer.skipBytes(8); state = State.READ_COMMON_HEADER; delegate.readGoAwayFrame(lastGoodStreamId, statusCode); break; case READ_HEADERS_FRAME: if (buffer.readableBytes() < 4) { return; } streamId = getUnsignedInt(buffer, buffer.readerIndex()); last = hasFlag(flags, SPDY_FLAG_FIN); buffer.skipBytes(4); length -= 4; if (streamId == 0) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid HEADERS Frame"); } else { state = State.READ_HEADER_BLOCK; delegate.readHeadersFrame(streamId, last); } break; case READ_WINDOW_UPDATE_FRAME: if (buffer.readableBytes() < 8) { return; } streamId = getUnsignedInt(buffer, buffer.readerIndex()); int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4); buffer.skipBytes(8); if (deltaWindowSize == 0) { state = State.FRAME_ERROR; delegate.readFrameError("Invalid WINDOW_UPDATE Frame"); } else { state = State.READ_COMMON_HEADER; delegate.readWindowUpdateFrame(streamId, deltaWindowSize); } break; case READ_HEADER_BLOCK: if (length == 0) { state = State.READ_COMMON_HEADER; delegate.readHeaderBlockEnd(); break; } if (!buffer.isReadable()) { return; } int compressedBytes = Math.min(buffer.readableBytes(), length); ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes); headerBlock.writeBytes(buffer, compressedBytes); length -= compressedBytes; delegate.readHeaderBlock(headerBlock); break; case DISCARD_FRAME: int numBytes = Math.min(buffer.readableBytes(), length); buffer.skipBytes(numBytes); length -= numBytes; if (length == 0) { state = State.READ_COMMON_HEADER; break; } return; case FRAME_ERROR: buffer.skipBytes(buffer.readableBytes()); return; default: throw new Error("Shouldn't reach here."); } } }
@Test public void testInvalidSpdySynReplyFrameLength() throws Exception { short type = 2; byte flags = 0; int length = 0; // invalid length ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length); encodeControlFrameHeader(buf, type, flags, length); decoder.decode(buf); verify(delegate).readFrameError(anyString()); assertFalse(buf.isReadable()); buf.release(); }
public boolean matchStage(StageConfigIdentifier stageIdentifier, StageEvent event) { return this.event.include(event) && appliesTo(stageIdentifier.getPipelineName(), stageIdentifier.getStageName()); }
@Test void shouldMatchFixedStage() { NotificationFilter filter = new NotificationFilter("cruise", "dev", StageEvent.Fixed, false); assertThat(filter.matchStage(new StageConfigIdentifier("cruise", "dev"), StageEvent.Fixed)).isTrue(); }
@Override public void process(String host, String value, ApplicationId applicationId, ApplicationSubmissionContext submissionContext) { submissionContext.setNodeLabelExpression(value); }
@Test public void testNodeLabelProcessor() { ContextProcessor nodeLabelProcessor = new NodeLabelProcessor(); ApplicationId app = ApplicationId.newInstance(123456, 111); ApplicationSubmissionContext applicationSubmissionContext = mock(ApplicationSubmissionContext.class); when(applicationSubmissionContext.getApplicationId()).thenReturn(app); nodeLabelProcessor.process("host.cluster2.com", "foo", app, applicationSubmissionContext); verify(applicationSubmissionContext, times(1)) .setNodeLabelExpression("foo"); }
public void processTimeout() { // this is only for jobs which transaction is not started. // if transaction is started, global transaction manager will handle the timeout. writeLock(); try { if (state != JobState.PENDING) { return; } if (!isTimeout()) { return; } unprotectedExecuteCancel(new FailMsg(FailMsg.CancelType.TIMEOUT, "loading timeout to cancel"), false); logFinalOperation(); } finally { writeUnlock(); } }
@Test public void testProcessTimeout(@Mocked GlobalStateMgr globalStateMgr, @Mocked EditLog editLog) { LoadJob loadJob = new BrokerLoadJob(); Deencapsulation.setField(loadJob, "timeoutSecond", 0); new Expectations() { { globalStateMgr.getEditLog(); minTimes = 0; result = editLog; } }; loadJob.processTimeout(); Assert.assertEquals(JobState.CANCELLED, loadJob.getState()); }
@SuppressWarnings("deprecation") File convertToFile(URL url) { String protocol = url.getProtocol(); if ("file".equals(protocol)) { return new File(URLDecoder.decode(url.getFile())); } else { addInfo("URL [" + url + "] is not of type file"); return null; } }
@Test // See http://jira.qos.ch/browse/LBCORE-119 public void fileToURLAndBack() throws MalformedURLException { File file = new File("a b.xml"); URL url = file.toURI().toURL(); ConfigurationWatchList cwl = new ConfigurationWatchList(); File back = cwl.convertToFile(url); assertEquals(file.getName(), back.getName()); }
boolean dropSession(final String clientId, boolean removeSessionState) { LOG.debug("Disconnecting client: {}", clientId); if (clientId == null) { return false; } final Session client = pool.get(clientId); if (client == null) { LOG.debug("Client {} not found, nothing disconnected", clientId); return false; } client.closeImmediately(); if (removeSessionState) { purgeSessionState(client); } LOG.debug("Client {} successfully disconnected from broker", clientId); return true; }
@Test public void testDropSessionWithNullClientId() { assertFalse(sut.dropSession(null, ANY_BOOLEAN), "Can't be successful when null clientId is passed"); }
@Override public void destroy() { if (this.sqsClient != null) { try { this.sqsClient.shutdown(); } catch (Exception e) { log.error("Failed to shutdown SQS client during destroy()", e); } } }
@Test void givenSqsClientIsNull_whenDestroy_thenVerifyNoInteractions() { ReflectionTestUtils.setField(node, "sqsClient", null); node.destroy(); then(sqsClientMock).shouldHaveNoInteractions(); }
@Override public String getMethod() { return PATH; }
@Test public void testGetChatMenuButtonAsWebApp() { SetChatMenuButton setChatMenuButton = SetChatMenuButton .builder() .chatId("123456") .menuButton(MenuButtonWebApp .builder() .text("Web app text") .webAppInfo(WebAppInfo.builder().url("My url").build()) .build()) .build(); assertEquals("setChatMenuButton", setChatMenuButton.getMethod()); assertDoesNotThrow(setChatMenuButton::validate); }
public static <EventT> Write<EventT> write() { return new AutoValue_JmsIO_Write.Builder<EventT>().build(); }
@Test public void testWriteMessageWithRetryPolicyReachesLimit() throws Exception { String messageText = "text"; int maxPublicationAttempts = 2; List<String> data = Collections.singletonList(messageText); RetryConfiguration retryConfiguration = RetryConfiguration.create(maxPublicationAttempts, null, null); WriteJmsResult<String> output = pipeline .apply(Create.of(data)) .apply( JmsIO.<String>write() .withConnectionFactory(connectionFactory) .withValueMapper( (SerializableBiFunction<String, Session, Message>) (s, session) -> { throw new JmsIOException("Error!!"); }) .withRetryConfiguration(retryConfiguration) .withQueue(QUEUE) .withUsername(USERNAME) .withPassword(PASSWORD)); PAssert.that(output.getFailedMessages()).containsInAnyOrder(messageText); PipelineResult pipelineResult = pipeline.run(); MetricQueryResults metrics = pipelineResult .metrics() .queryMetrics( MetricsFilter.builder() .addNameFilter( MetricNameFilter.named( JMS_IO_PRODUCER_METRIC_NAME, PUBLICATION_RETRIES_METRIC_NAME)) .build()); assertThat( metrics.getCounters(), contains( allOf( hasProperty("attempted", is((long) maxPublicationAttempts)), hasProperty( "key", hasToString( containsString( String.format( "%s:%s", JMS_IO_PRODUCER_METRIC_NAME, PUBLICATION_RETRIES_METRIC_NAME))))))); assertQueueIsEmpty(); }
public void runPickle(Pickle pickle) { try { StepTypeRegistry stepTypeRegistry = createTypeRegistryForPickle(pickle); snippetGenerators = createSnippetGeneratorsForPickle(stepTypeRegistry); // Java8 step definitions will be added to the glue here buildBackendWorlds(); glue.prepareGlue(stepTypeRegistry); TestCase testCase = createTestCaseForPickle(pickle); testCase.run(bus); } finally { glue.removeScenarioScopedGlue(); disposeBackendWorlds(); } }
@Test void scenario_hooks_not_executed_for_empty_pickles() { HookDefinition beforeHook = createHook(); HookDefinition afterHook = createHook(); HookDefinition beforeStepHook = createHook(); HookDefinition afterStepHook = createHook(); TestRunnerSupplier runnerSupplier = new TestRunnerSupplier(bus, runtimeOptions) { @Override public void loadGlue(Glue glue, List<URI> gluePaths) { glue.addBeforeHook(beforeHook); glue.addAfterHook(afterHook); glue.addBeforeStepHook(beforeStepHook); glue.addAfterStepHook(afterStepHook); } }; runnerSupplier.get().runPickle(createEmptyPickle()); verify(beforeHook, never()).execute(any(TestCaseState.class)); verify(afterStepHook, never()).execute(any(TestCaseState.class)); verify(afterHook, never()).execute(any(TestCaseState.class)); }
public static NetworkInterface findNetworkInterface() { List<NetworkInterface> validNetworkInterfaces = emptyList(); try { validNetworkInterfaces = getValidNetworkInterfaces(); } catch (Throwable e) { logger.warn(e); } NetworkInterface result = null; // Try to find the preferred one for (NetworkInterface networkInterface : validNetworkInterfaces) { if (isPreferredNetworkInterface(networkInterface)) { result = networkInterface; break; } } if (result == null) { // If not found, try to get the first one for (NetworkInterface networkInterface : validNetworkInterfaces) { Enumeration<InetAddress> addresses = networkInterface.getInetAddresses(); while (addresses.hasMoreElements()) { Optional<InetAddress> addressOp = toValidAddress(addresses.nextElement()); if (addressOp.isPresent()) { try { if (addressOp.get().isReachable(100)) { return networkInterface; } } catch (IOException e) { // ignore } } } } } if (result == null) { result = first(validNetworkInterfaces); } return result; }
@Test void testIgnoreAllInterfaces() { // store the origin ignored interfaces String originIgnoredInterfaces = this.getIgnoredInterfaces(); try { // ignore all interfaces this.setIgnoredInterfaces(".*"); assertNull(NetUtils.findNetworkInterface()); } finally { // recover the origin ignored interfaces this.setIgnoredInterfaces(originIgnoredInterfaces); } }
@Override public void validate(final Host bookmark, final LoginCallback prompt, final LoginOptions options) throws ConnectionCanceledException, LoginFailureException { if(log.isDebugEnabled()) { log.debug(String.format("Validate login credentials for %s", bookmark)); } final Credentials credentials = bookmark.getCredentials(); if(credentials.isPublicKeyAuthentication()) { if(!credentials.getIdentity().attributes().getPermission().isReadable()) { log.warn(String.format("Prompt to select identity file not readable %s", credentials.getIdentity())); credentials.setIdentity(prompt.select(credentials.getIdentity())); } } if(options.keychain) { if(options.password) { if(StringUtils.isBlank(credentials.getPassword())) { final String password = keychain.findLoginPassword(bookmark); if(StringUtils.isNotBlank(password)) { if(log.isInfoEnabled()) { log.info(String.format("Fetched password from keychain for %s", bookmark)); } // No need to reinsert found password to the keychain. credentials.setSaved(false); credentials.setPassword(password); } } } if(options.token) { if(StringUtils.isBlank(credentials.getToken())) { final String token = keychain.findLoginToken(bookmark); if(StringUtils.isNotBlank(token)) { if(log.isInfoEnabled()) { log.info(String.format("Fetched token from keychain for %s", bookmark)); } // No need to reinsert found token to the keychain. credentials.setSaved(false); credentials.setToken(token); } } } if(options.publickey) { final String passphrase = keychain.findPrivateKeyPassphrase(bookmark); if(StringUtils.isNotBlank(passphrase)) { if(log.isInfoEnabled()) { log.info(String.format("Fetched private key passphrase from keychain for %s", bookmark)); } // No need to reinsert found token to the keychain. credentials.setSaved(false); credentials.setIdentityPassphrase(passphrase); } } if(options.oauth) { final OAuthTokens tokens = keychain.findOAuthTokens(bookmark); if(tokens.validate()) { if(log.isInfoEnabled()) { log.info(String.format("Fetched OAuth token from keychain for %s", bookmark)); } // No need to reinsert found token to the keychain. credentials.setSaved(tokens.isExpired()); credentials.setOauth(tokens); } } } if(!credentials.validate(bookmark.getProtocol(), options)) { final CredentialsConfigurator configurator = bookmark.getProtocol().getFeature(CredentialsConfigurator.class); if(log.isDebugEnabled()) { log.debug(String.format("Auto configure credentials with %s", configurator)); } bookmark.setCredentials(configurator.configure(bookmark)); } if(!credentials.validate(bookmark.getProtocol(), options)) { final StringAppender message = new StringAppender(); if(options.password) { message.append(MessageFormat.format(LocaleFactory.localizedString( "Login {0} with username and password", "Credentials"), BookmarkNameProvider.toString(bookmark))); } if(options.publickey) { message.append(LocaleFactory.localizedString( "Select the private key in PEM or PuTTY format", "Credentials")); } message.append(LocaleFactory.localizedString("No login credentials could be found in the Keychain", "Credentials")); this.prompt(bookmark, message.toString(), prompt, options); } }
@Test public void testFindPasswordSftp() throws Exception { final AtomicBoolean keychain = new AtomicBoolean(false); KeychainLoginService l = new KeychainLoginService(new DisabledPasswordStore() { @Override public String findLoginPassword(final Host bookmark) { keychain.set(true); return "P"; } } ); final Credentials credentials = new Credentials(); credentials.setUsername("u"); final Host host = new Host(new TestProtocol(), "test.cyberduck.ch", credentials); l.validate(host, new DisabledLoginCallback(), new LoginOptions(host.getProtocol())); assertTrue(keychain.get()); assertFalse(host.getCredentials().isSaved()); assertEquals("P", host.getCredentials().getPassword()); }
@Override public int hashCode() { int hash = 3; hash = 97 * hash + (this.qualifyingNames != null ? this.qualifyingNames.hashCode() : 0); hash = 97 * hash + (this.resultType != null ? this.resultType.toString().hashCode() : 0); return hash; }
@Test public void testHashCodeWithNullQualifyingNames() { TypeMirror resultType = new TestTypeMirror( "someType" ); SelectionParameters params = new SelectionParameters( null, null, resultType, null ); assertThat( params.hashCode() ) .as( "QualifyingNames null hashCode" ) .isEqualTo( 3 * 97 * 97 + "someType".hashCode() ); }
@SuppressWarnings("unchecked") public static <T extends Message> T newMessageByJavaClassName(final String className, final byte[] bs) { final MethodHandle handle = PARSE_METHODS_4J.get(className); if (handle == null) { throw new MessageClassNotFoundException(className + " not found"); } try { return (T) handle.invoke(bs); } catch (Throwable t) { throw new SerializationException(t); } }
@Test public void testNewMessageByJavaClassName() { SnapshotMeta meta = SnapshotMeta.newBuilder().setLastIncludedIndex(99).setLastIncludedTerm(1).build(); SnapshotMeta pMeta = ProtobufMsgFactory .newMessageByJavaClassName(meta.getClass().getName(), meta.toByteArray()); assertNotNull(pMeta); assertNotSame(pMeta, meta); assertEquals(pMeta, meta); }
@Override public ValidationResult validate(Object value) { if (value == null || value instanceof String) { return new ValidationResult.ValidationPassed(); } else { return new ValidationResult.ValidationFailed("Value \"" + value + "\" is not a valid string!"); } }
@Test public void validateString() { assertThat(validator.validate("foobar")).isInstanceOf(ValidationResult.ValidationPassed.class); }
public static NamespaceName get(String tenant, String namespace) { validateNamespaceName(tenant, namespace); return get(tenant + '/' + namespace); }
@Test(expectedExceptions = IllegalArgumentException.class) public void namespace_nullTenant2() { NamespaceName.get(null, "cluster", "namespace"); }
@Override public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException { try (InputStream input = provider.open(requireNonNull(path))) { final JsonNode node = mapper.readTree(createParser(input)); if (node == null) { throw ConfigurationParsingException .builder("Configuration at " + path + " must not be empty") .build(path); } return build(node, path); } catch (JsonParseException e) { throw ConfigurationParsingException .builder("Malformed " + formatName) .setCause(e) .setLocation(e.getLocation()) .setDetail(e.getMessage()) .build(path); } }
@Test void overridesArrayPropertiesWithIndices() throws Exception { System.setProperty("dw.servers[0].port", "7000"); System.setProperty("dw.servers[2].port", "9000"); final Example example = factory.build(configurationSourceProvider, validFile); assertThat(example.getServers()) .hasSize(3) .satisfies(servers -> assertThat(servers).element(0).extracting(ExampleServer::getPort).isEqualTo(7000)) .satisfies(servers -> assertThat(servers).element(2).extracting(ExampleServer::getPort).isEqualTo(9000)); }
static ClockImpl createClock() { String clockImplClassName = System.getProperty(ClockProperties.HAZELCAST_CLOCK_IMPL); if (clockImplClassName != null) { try { return ClassLoaderUtil.newInstance(null, clockImplClassName); } catch (Exception e) { throw rethrow(e); } } String clockOffset = System.getProperty(ClockProperties.HAZELCAST_CLOCK_OFFSET); long offset = 0L; if (clockOffset != null) { try { offset = Long.parseLong(clockOffset); } catch (NumberFormatException e) { throw rethrow(e); } } if (offset != 0L) { return new SystemOffsetClock(offset); } return new SystemClock(); }
@Test public void testCreateClock_withClockOffset() { setClockOffset(30); Clock.ClockImpl clock = Clock.createClock(); assertInstanceOf(Clock.SystemOffsetClock.class, clock); }
public static boolean startsWithIgnoreCase(String str, String prefix) { if (str == null || prefix == null || str.length() < prefix.length()) { return false; } // return str.substring(0, prefix.length()).equalsIgnoreCase(prefix); return str.regionMatches(true, 0, prefix, 0, prefix.length()); }
@Test void testStartsWithIgnoreCase() { assertTrue(startsWithIgnoreCase("dubbo.application.name", "dubbo.application.")); assertTrue(startsWithIgnoreCase("dubbo.Application.name", "dubbo.application.")); assertTrue(startsWithIgnoreCase("Dubbo.application.name", "dubbo.application.")); }
public static ExecuteEnv getInstance() { return INSTANCE; }
@Test public void testGetInstance() { Set<Thread> tds = new HashSet<Thread>(); for (int i = 0; i < THREAD_MAX_NUM; i++) { Thread td = new Thread(new MyTest(i, oids)); tds.add(td); td.start(); } for (Thread td : tds) { try { td.join(); } catch (InterruptedException e) { e.printStackTrace(); } } for (int i = 1; i < THREAD_MAX_NUM; i++) { Assert.assertEquals(oids[i - 1], oids[i]); } }
@Override public ConfigInfoBetaWrapper findConfigInfo4Beta(final String dataId, final String group, final String tenant) { String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; try { ConfigInfoBetaMapper configInfoBetaMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO_BETA); return this.jt.queryForObject(configInfoBetaMapper.select( Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "beta_ips", "encrypted_data_key", "gmt_modified"), Arrays.asList("data_id", "group_id", "tenant_id")), new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. return null; } catch (CannotGetJdbcConnectionException e) { LogUtil.FATAL_LOG.error("[db-error] " + e, e); throw e; } }
@Test void testFindConfigInfo4Beta() { String dataId = "dataId456789"; String group = "group4567"; String tenant = "tenant56789o0"; //mock exist beta ConfigInfoBetaWrapper mockedConfigInfoStateWrapper = new ConfigInfoBetaWrapper(); mockedConfigInfoStateWrapper.setDataId(dataId); mockedConfigInfoStateWrapper.setGroup(group); mockedConfigInfoStateWrapper.setTenant(tenant); mockedConfigInfoStateWrapper.setId(123456L); mockedConfigInfoStateWrapper.setLastModified(System.currentTimeMillis()); when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {dataId, group, tenant}), eq(CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER))).thenReturn(mockedConfigInfoStateWrapper); ConfigInfoBetaWrapper configInfo4BetaReturn = externalConfigInfoBetaPersistService.findConfigInfo4Beta(dataId, group, tenant); assertEquals(mockedConfigInfoStateWrapper, configInfo4BetaReturn); //mock query throw CannotGetJdbcConnectionException when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {dataId, group, tenant}), eq(CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER))).thenThrow(new CannotGetJdbcConnectionException("mock fail11111")); try { externalConfigInfoBetaPersistService.findConfigInfo4Beta(dataId, group, tenant); assertTrue(false); } catch (Exception exception) { assertEquals("mock fail11111", exception.getMessage()); } //mock query throw EmptyResultDataAccessException when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {dataId, group, tenant}), eq(CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER))).thenThrow(new EmptyResultDataAccessException(1)); ConfigInfoBetaWrapper configInfo4BetaNull = externalConfigInfoBetaPersistService.findConfigInfo4Beta(dataId, group, tenant); assertNull(configInfo4BetaNull); }
static @NonNull CloudStringReader of(final @NonNull CommandInput commandInput) { return new CloudStringReader(commandInput); }
@Test void testAllThreeWordsRead() throws CommandSyntaxException { // Arrange final CommandInput commandInput = CommandInput.of("hello some worlds"); final StringReader stringReader = CloudStringReader.of(commandInput); // Act final String readString1 = stringReader.readString(); stringReader.skipWhitespace(); final String readString2 = stringReader.readString(); stringReader.skipWhitespace(); final String readString3 = stringReader.readString(); // Assert assertThat(readString1).isEqualTo("hello"); assertThat(readString2).isEqualTo("some"); assertThat(readString3).isEqualTo("worlds"); assertThat(commandInput.isEmpty()).isTrue(); }
@Override public String toString() { return String.format("ThriftMetaData(thriftClassName: %s, descriptor: %s)", thriftClassName, descriptor); }
@Test public void testToStringDoesNotThrow() { StructType descriptor = new StructType(new ArrayList<ThriftField>(), StructOrUnionType.STRUCT); ThriftMetaData tmd = new ThriftMetaData("non existent class!!!", descriptor); assertEquals( ("ThriftMetaData(thriftClassName: non existent class!!!, descriptor: {\n" + " \"id\" : \"STRUCT\",\n" + " \"children\" : [ ],\n" + " \"structOrUnionType\" : \"STRUCT\",\n" + " \"logicalTypeAnnotation\" : null\n" + "})") .replace("\n", System.lineSeparator()), tmd.toString()); tmd = new ThriftMetaData("non existent class!!!", null); assertEquals("ThriftMetaData(thriftClassName: non existent class!!!, descriptor: null)", tmd.toString()); }
public static void checkMock(Class<?> interfaceClass, AbstractInterfaceConfig config) { String mock = config.getMock(); if (ConfigUtils.isEmpty(mock)) { return; } String normalizedMock = MockInvoker.normalizeMock(mock); if (normalizedMock.startsWith(RETURN_PREFIX)) { normalizedMock = normalizedMock.substring(RETURN_PREFIX.length()).trim(); try { // Check whether the mock value is legal, if it is illegal, throw exception MockInvoker.parseMockValue(normalizedMock); } catch (Exception e) { throw new IllegalStateException( "Illegal mock return in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />"); } } else if (normalizedMock.startsWith(THROW_PREFIX)) { normalizedMock = normalizedMock.substring(THROW_PREFIX.length()).trim(); if (ConfigUtils.isNotEmpty(normalizedMock)) { try { // Check whether the mock value is legal MockInvoker.getThrowable(normalizedMock); } catch (Exception e) { throw new IllegalStateException( "Illegal mock throw in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />"); } } } else { // Check whether the mock class is a implementation of the interfaceClass, and if it has a default // constructor MockInvoker.getMockObject(config.getScopeModel().getExtensionDirector(), normalizedMock, interfaceClass); } }
@Test void checkMock2() { Assertions.assertThrows(IllegalStateException.class, () -> { InterfaceConfig interfaceConfig = new InterfaceConfig(); interfaceConfig.setMock(GreetingMock1.class.getName()); ConfigValidationUtils.checkMock(Greeting.class, interfaceConfig); }); }
@Udf(description = "Returns the tangent of an INT value") public Double tan( @UdfParameter( value = "value", description = "The value in radians to get the tangent of." ) final Integer value ) { return tan(value == null ? null : value.doubleValue()); }
@Test public void shouldHandleLessThanNegative2Pi() { assertThat(udf.tan(-9.1), closeTo(0.33670052643287396, 0.000000000000001)); assertThat(udf.tan(-6.3), closeTo(-0.016816277694182057, 0.000000000000001)); assertThat(udf.tan(-7), closeTo(-0.8714479827243188, 0.000000000000001)); assertThat(udf.tan(-7L), closeTo(-0.8714479827243188, 0.000000000000001)); }
@Override
public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, CreateDownloadShareRequest options, final PasswordCallback callback) throws BackgroundException {
    try {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Create download share for %s", file));
        }
        if(null == options) {
            options = new CreateDownloadShareRequest();
            log.warn(String.format("Use default share options %s", options));
        }
        final Long fileid = Long.parseLong(nodeid.getVersionId(file));
        final Host bookmark = session.getHost();
        if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(file)) {
            // get existing file key associated with the sharing user
            final FileKey key = new NodesApi(session.getClient()).requestUserFileKey(fileid, null, null);
            final EncryptedFileKey encFileKey = TripleCryptConverter.toCryptoEncryptedFileKey(key);
            final UserKeyPairContainer keyPairContainer = session.getKeyPairForFileKey(encFileKey.getVersion());
            final UserKeyPair userKeyPair = TripleCryptConverter.toCryptoUserKeyPair(keyPairContainer);
            final Credentials passphrase = new TripleCryptKeyPair().unlock(callback, bookmark, userKeyPair);
            final PlainFileKey plainFileKey = Crypto.decryptFileKey(encFileKey, userKeyPair.getUserPrivateKey(), passphrase.getPassword().toCharArray());
            // encrypt file key with a new key pair
            final UserKeyPair pair;
            if(null == options.getPassword()) {
                pair = Crypto.generateUserKeyPair(session.requiredKeyPairVersion(), callback.prompt(
                        bookmark, LocaleFactory.localizedString("Passphrase", "Cryptomator"),
                        LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
                        new LoginOptions().icon(session.getHost().getProtocol().disk())
                ).getPassword().toCharArray());
            }
            else {
                pair = Crypto.generateUserKeyPair(session.requiredKeyPairVersion(), options.getPassword().toCharArray());
            }
            final EncryptedFileKey encryptedFileKey = Crypto.encryptFileKey(plainFileKey, pair.getUserPublicKey());
            options.setPassword(null);
            options.setKeyPair(TripleCryptConverter.toSwaggerUserKeyPairContainer(pair));
            options.setFileKey(TripleCryptConverter.toSwaggerFileKey(encryptedFileKey));
        }
        final DownloadShare share = new SharesApi(session.getClient()).createDownloadShare(
                options.nodeId(fileid), StringUtils.EMPTY, null);
        final String help;
        if(null == share.getExpireAt()) {
            help = MessageFormat.format(LocaleFactory.localizedString("{0} URL"), LocaleFactory.localizedString("Pre-Signed", "S3"));
        }
        else {
            final long expiry = share.getExpireAt().getMillis();
            help = MessageFormat.format(LocaleFactory.localizedString("{0} URL"), LocaleFactory.localizedString("Pre-Signed", "S3"))
                    + " (" + MessageFormat.format(LocaleFactory.localizedString("Expires {0}", "S3") + ")",
                    UserDateFormatterFactory.get().getShortFormat(expiry * 1000)
            );
        }
        final Matcher matcher = Pattern.compile(SDSSession.VERSION_REGEX).matcher(session.softwareVersion().getRestApiVersion());
        if(matcher.matches()) {
            if(new Version(matcher.group(1)).compareTo(new Version("4.26")) < 0) {
                return new DescriptiveUrl(URI.create(String.format("%s://%s/#/public/shares-downloads/%s",
                        bookmark.getProtocol().getScheme(), bookmark.getHostname(), share.getAccessKey())),
                        DescriptiveUrl.Type.signed, help);
            }
        }
        return new DescriptiveUrl(URI.create(String.format("%s://%s/public/download-shares/%s",
                bookmark.getProtocol().getScheme(), bookmark.getHostname(), share.getAccessKey())),
                DescriptiveUrl.Type.signed, help);
    }
    catch(ApiException e) {
        throw new SDSExceptionMappingService(nodeid).map(e);
    }
    catch(CryptoException e) {
        throw new TripleCryptExceptionMappingService().map(e);
    }
}
@Test(expected = InteroperabilityException.class) public void testToUrlMissingEmailRecipients() throws Exception { final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session); final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new SDSTouchFeature(session, nodeid).touch(new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); try { final DescriptiveUrl url = new SDSShareFeature(session, nodeid).toDownloadUrl(test, Share.Sharee.world, new CreateDownloadShareRequest() .expiration(new ObjectExpiration().enableExpiration(false)) .notifyCreator(false) .sendMail(true) .mailRecipients(null) .sendSms(false) .password(null) .mailSubject(null) .mailBody(null) .maxDownloads(null), new DisabledPasswordCallback()); } finally { new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); } }
@Override public void setDebugMode(DebugMode debugMode) { }
@Test public void setDebugMode() { mSensorsAPI.setDebugMode(SensorsDataAPI.DebugMode.DEBUG_OFF); Assert.assertFalse(SALog.isDebug()); }
public ProtocolBuilder dispatcher(String dispatcher) { this.dispatcher = dispatcher; return getThis(); }
@Test void dispatcher() { ProtocolBuilder builder = new ProtocolBuilder(); builder.dispatcher("mockdispatcher"); Assertions.assertEquals("mockdispatcher", builder.build().getDispatcher()); }
public long residentMemorySizeEstimate() { long size = 0; size += Long.BYTES; // value.context.timestamp size += Long.BYTES; // value.context.offset if (topic != null) { size += topic.toCharArray().length; } size += Integer.BYTES; // partition for (final Header header : headers) { size += header.key().toCharArray().length; final byte[] value = header.value(); if (value != null) { size += value.length; } } return size; }
@Test public void shouldEstimateTopicLength() { final ProcessorRecordContext context = new ProcessorRecordContext( 42L, 73L, 0, "topic", new RecordHeaders() ); assertEquals(MIN_SIZE + 5L, context.residentMemorySizeEstimate()); }
@Override public CompletableFuture<Void> deregisterApplication( final ApplicationStatus applicationStatus, final @Nullable String diagnostics) { synchronized (lock) { if (!running || leaderResourceManager == null) { return deregisterWithoutLeaderRm(); } final ResourceManager<?> currentLeaderRM = leaderResourceManager; return currentLeaderRM .getStartedFuture() .thenCompose( ignore -> { synchronized (lock) { if (isLeader(currentLeaderRM)) { return currentLeaderRM .getSelfGateway(ResourceManagerGateway.class) .deregisterApplication( applicationStatus, diagnostics) .thenApply(ack -> null); } else { return deregisterWithoutLeaderRm(); } } }); } }
@Test void deregisterApplication_noLeaderRm() throws Exception { createAndStartResourceManager(); final CompletableFuture<Void> deregisterApplicationFuture = resourceManagerService.deregisterApplication(ApplicationStatus.CANCELED, null); // should not report error assertThatFuture(deregisterApplicationFuture).eventuallySucceeds(); }
public static <T> T copyProperties(Object source, Class<T> tClass, String... ignoreProperties) { if (null == source) { return null; } T target = ReflectUtil.newInstanceIfPossible(tClass); copyProperties(source, target, CopyOptions.create().setIgnoreProperties(ignoreProperties)); return target; }
@Test
public void copyPropertiesMapToMapTest() {
    // test copying a Map to a Map
    final Map<String, Object> p1 = new HashMap<>();
    p1.put("isSlow", true);
    p1.put("name", "测试");
    p1.put("subName", "sub测试");
    final Map<String, Object> map = MapUtil.newHashMap();
    BeanUtil.copyProperties(p1, map);
    assertTrue((Boolean) map.get("isSlow"));
    assertEquals("测试", map.get("name"));
    assertEquals("sub测试", map.get("subName"));
}
public void setLocalMode(boolean localMode) { if (localMode) { addCapability(Capability.LOCALMODE); } else { removeCapability(Capability.LOCALMODE); } }
@Test public void testSetLocalMode() throws Exception { status.setLocalMode(false); assertFalse(status.hasCapability(ServerStatus.Capability.LOCALMODE)); status.setLocalMode(true); assertTrue(status.hasCapability(ServerStatus.Capability.LOCALMODE)); }
@Override public void run(DiagnosticsLogWriter writer) { metricCollector.writer = writer; // we set the time explicitly so that for this particular rendering of the probes, all metrics have exactly // the same timestamp metricCollector.timeMillis = System.currentTimeMillis(); metricsRegistry.collect(metricCollector); metricCollector.writer = null; }
@Test public void testExclusion() { metricsRegistry.registerStaticMetrics(new ExclusionProbeSource(), "test"); plugin.run(logWriter); assertContains("[unit=count,metric=test.notExcludedLong]=1"); assertNotContains("[unit=count,metric=test.excludedLong]=1"); assertContains("[unit=count,metric=test.notExcludedDouble]=1.5"); assertNotContains("[unit=count,metric=test.excludedDouble]=2.5"); }
@Override public InputFile newInputFile(String path) { return new CachingInputFile(fileContentCache, wrappedIO.newInputFile(path)); }
@Test public void testNewInputFile() { writeIcebergMetaTestFile(); String path = "file:/tmp/0001.metadata.json"; // create iceberg cachingFileIO IcebergCachingFileIO cachingFileIO = new IcebergCachingFileIO(); cachingFileIO.setConf(new Configuration()); Map<String, String> icebergProperties = new HashMap<>(); icebergProperties.put("iceberg.catalog.type", "hive"); cachingFileIO.initialize(icebergProperties); InputFile cachingFileIOInputFile = cachingFileIO.newInputFile(path); cachingFileIOInputFile.newStream(); String cachingFileIOPath = cachingFileIOInputFile.location(); Assert.assertEquals(path, cachingFileIOPath); long cacheIOInputFileSize = cachingFileIOInputFile.getLength(); Assert.assertEquals(cacheIOInputFileSize, 39); cachingFileIO.deleteFile(path); }
@ProtoFactory public static MediaType fromString(String tree) { if (tree == null || tree.isEmpty()) throw CONTAINER.missingMediaType(); Matcher matcher = TREE_PATTERN.matcher(tree); return parseSingleMediaType(tree, matcher, false); }
@Test(expected = EncodingException.class) public void testParseInvalidWeight() { MediaType.fromString("application/json ; q=high"); }
boolean isContentExpected() { return this.equals(DATA); }
@Test public void isContentExpected() { assertTrue(SmtpCommand.valueOf("DATA").isContentExpected()); assertTrue(SmtpCommand.valueOf("data").isContentExpected()); assertFalse(SmtpCommand.HELO.isContentExpected()); assertFalse(SmtpCommand.HELP.isContentExpected()); assertFalse(SmtpCommand.valueOf("DATA2").isContentExpected()); }
public List<RunResponse> startBatch( String workflowId, String version, List<RunRequest> requests) { if (ObjectHelper.isCollectionEmptyOrNull(requests)) { return Collections.emptyList(); } Checks.checkTrue( requests.size() <= Constants.START_BATCH_LIMIT, "The size of Requests is greater than the batch limit"); WorkflowDefinition definition = workflowDao.getWorkflowDefinition(workflowId, version); // Fail the whole batch if any request is invalid requests.forEach(request -> validateRequest(version, definition, request)); RunProperties runProperties = RunProperties.from(definition.getPropertiesSnapshot()); List<WorkflowInstance> instances = createWorkflowInstances( definition.getWorkflow(), definition.getInternalId(), definition.getMetadata().getWorkflowVersionId(), runProperties, requests); RunStrategy runStrategy = definition.getRunStrategyOrDefault(); int[] results = runStrategyDao.startBatchWithRunStrategy(workflowId, runStrategy, instances); List<RunResponse> responses = new ArrayList<>(); int idx = 0; for (WorkflowInstance instance : instances) { responses.add(RunResponse.from(instance, results[idx])); idx++; } LOG.debug( "Created {} of workflow instances for workflow id {} to start", requests.size(), workflowId); return responses; }
@Test public void testStartBatch() { when(runStrategyDao.startBatchWithRunStrategy(any(), any(), any())) .thenReturn(new int[] {1, 0}); RunRequest request = RunRequest.builder() .initiator(new ManualInitiator()) .requestId(UUID.fromString("41f0281e-41a2-468d-b830-56141b2f768b")) .currentPolicy(RunPolicy.START_FRESH_NEW_RUN) .build(); List<RunResponse> responses = actionHandler.startBatch("sample-minimal-wf", "active", Arrays.asList(request, request)); verify(workflowDao, times(1)).getWorkflowDefinition("sample-minimal-wf", "active"); verify(runStrategyDao, times(1)).startBatchWithRunStrategy(any(), any(), any()); assertEquals(2, responses.size()); assertEquals(1L, responses.get(0).getWorkflowVersionId()); assertEquals("41f0281e-41a2-468d-b830-56141b2f768b", responses.get(0).getWorkflowUuid()); assertEquals(RunResponse.Status.WORKFLOW_RUN_CREATED, responses.get(0).getStatus()); assertEquals(1L, responses.get(1).getWorkflowVersionId()); assertEquals("41f0281e-41a2-468d-b830-56141b2f768b", responses.get(1).getWorkflowUuid()); assertEquals(RunResponse.Status.DUPLICATED, responses.get(1).getStatus()); }
public OffsetRange[] getNextOffsetRanges(Option<String> lastCheckpointStr, long sourceLimit, HoodieIngestionMetrics metrics) { // Come up with final set of OffsetRanges to read (account for new partitions, limit number of events) long maxEventsToReadFromKafka = getLongWithAltKeys(props, KafkaSourceConfig.MAX_EVENTS_FROM_KAFKA_SOURCE); long numEvents; if (sourceLimit == Long.MAX_VALUE) { numEvents = maxEventsToReadFromKafka; LOG.info("SourceLimit not configured, set numEvents to default value : {}", maxEventsToReadFromKafka); } else { numEvents = sourceLimit; } long minPartitions = getLongWithAltKeys(props, KafkaSourceConfig.KAFKA_SOURCE_MIN_PARTITIONS); LOG.info("getNextOffsetRanges set config {} to {}", KafkaSourceConfig.KAFKA_SOURCE_MIN_PARTITIONS.key(), minPartitions); return getNextOffsetRanges(lastCheckpointStr, numEvents, minPartitions, metrics); }
@Test public void testGetNextOffsetRangesFromTimestampCheckpointType() { HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator(); testUtils.createTopic(testTopicName, 1); testUtils.sendMessages(testTopicName, Helpers.jsonifyRecords(dataGenerator.generateInserts("000", 1000))); KafkaOffsetGen kafkaOffsetGen = new KafkaOffsetGen(getConsumerConfigs("latest", KAFKA_CHECKPOINT_TYPE_TIMESTAMP)); OffsetRange[] nextOffsetRanges = kafkaOffsetGen.getNextOffsetRanges(Option.of(String.valueOf(System.currentTimeMillis() - 100000)), 500, metrics); assertEquals(1, nextOffsetRanges.length); assertEquals(0, nextOffsetRanges[0].fromOffset()); assertEquals(500, nextOffsetRanges[0].untilOffset()); }
@Override public boolean containsActiveConnection(final DataSource dataSource) { return 0 != getActiveConnections(dataSource); }
@Test void assertContainsActiveConnection() throws SQLException { DataSource dataSource = createHikariDataSource(); try (Connection ignored = dataSource.getConnection()) { assertTrue(new HikariDataSourcePoolActiveDetector().containsActiveConnection(dataSource)); } }
@Override
public void deleteConfig(Long id) {
    // Validate that the config exists
    ConfigDO config = validateConfigExists(id);
    // Built-in configs must not be deleted
    if (ConfigTypeEnum.SYSTEM.getType().equals(config.getType())) {
        throw exception(CONFIG_CAN_NOT_DELETE_SYSTEM_TYPE);
    }
    // Delete
    configMapper.deleteById(id);
}
@Test
public void testDeleteConfig_canNotDeleteSystemType() {
    // mock data
    ConfigDO dbConfig = randomConfigDO(o -> {
        o.setType(ConfigTypeEnum.SYSTEM.getType()); // SYSTEM type must not be deleted
    });
    configMapper.insert(dbConfig); // @Sql: insert an existing record first
    // prepare parameters
    Long id = dbConfig.getId();
    // call and assert the expected exception
    assertServiceException(() -> configService.deleteConfig(id), CONFIG_CAN_NOT_DELETE_SYSTEM_TYPE);
}
@VisibleForTesting public void validateDictTypeExists(String type) { DictTypeDO dictType = dictTypeService.getDictType(type); if (dictType == null) { throw exception(DICT_TYPE_NOT_EXISTS); } if (!CommonStatusEnum.ENABLE.getStatus().equals(dictType.getStatus())) { throw exception(DICT_TYPE_NOT_ENABLE); } }
@Test
public void testValidateDictTypeExists_notEnable() {
    // mock the method: the dict type is disabled
    String dictType = randomString();
    when(dictTypeService.getDictType(eq(dictType))).thenReturn(
            randomPojo(DictTypeDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // call and assert the expected exception
    assertServiceException(() -> dictDataService.validateDictTypeExists(dictType), DICT_TYPE_NOT_ENABLE);
}
public static PTransform<PCollection<?>, PCollection<String>> elements() { return new Elements(); }
@Test @Category(NeedsRunner.class) public void testToStringOf() { Integer[] ints = {1, 2, 3, 4, 5}; String[] strings = {"1", "2", "3", "4", "5"}; PCollection<Integer> input = p.apply(Create.of(Arrays.asList(ints))); PCollection<String> output = input.apply(ToString.elements()); PAssert.that(output).containsInAnyOrder(strings); p.run(); }
@Override // The set of valid retention strategies must // - contain only names of supported strategies // - at least one must stay enabled public void validate(String parameter, Set<String> values) throws ValidationException { if (!values.stream() .filter(s -> !VALID_STRATEGIES.contains(s) && !ARCHIVE_RETENTION_STRATEGY.equals(s)) .collect(Collectors.toSet()).isEmpty()) { throw new ValidationException("Parameter " + parameter + " contains invalid values: " + values); } if (values.containsAll(VALID_STRATEGIES)) { throw new ValidationException(parameter + ":" + values + " At least one retention of the following [none, close, delete], should stay enabled!"); } }
@Test void invalidStrategy() { assertThrows(ValidationException.class, () -> { classUnderTest.validate(PARAM, Set.of("nonsense")); }); }
public static void executeWithRetry(RetryFunction function) throws Exception { executeWithRetry(maxAttempts, minDelay, function); }
@Test public void retryFunctionThatFailsWithMoreAttempts() throws Exception { exceptionRule.expect(SQLException.class); exceptionRule.expectMessage("Problem with connection"); executeWithRetry(4, 1_000, IOITHelperTest::failingFunction); assertEquals(4, listOfExceptionsThrown.size()); }
public static String buildHttpErrorMessage(final HttpURLConnection connection) throws IOException { val messageBuilder = new StringBuilder("(").append(connection.getResponseCode()).append(")"); if (connection.getResponseMessage() != null) { messageBuilder.append(" "); messageBuilder.append(connection.getResponseMessage()); } try (var isr = new InputStreamReader(connection.getErrorStream(), StandardCharsets.UTF_8); var br = new BufferedReader(isr)) { String output; messageBuilder.append("["); while ((output = br.readLine()) != null) { messageBuilder.append(output); } messageBuilder.append("]"); } finally { connection.disconnect(); } return messageBuilder.toString(); }
@Test
public void testBuildHttpErrorMessage() throws IOException {
    // creating mock http connection
    HttpURLConnection connectionMock = null;
    // expected test data for mock connection
    var testResponseBody = "{\"error_description\":\"MSIS9612: The authorization code received in [code] parameter is invalid. \"}";
    var testConnectionResponseCode = 400;
    var testConnResponseMessage = "Bad Request.";
    // mocking expected test data
    try (InputStream in = new ByteArrayInputStream(testResponseBody.getBytes(StandardCharsets.UTF_8))) {
        connectionMock = Mockito.mock(HttpURLConnection.class);
        Mockito.when(connectionMock.getResponseCode()).thenReturn(testConnectionResponseCode);
        Mockito.when(connectionMock.getResponseMessage()).thenReturn(testConnResponseMessage);
        Mockito.when(connectionMock.getErrorStream()).thenReturn(in);
        // evaluating test
        var actual = HttpUtils.buildHttpErrorMessage(connectionMock);
        var expected = String.format("(%d) %s[%s]", testConnectionResponseCode, testConnResponseMessage, testResponseBody);
        Assert.assertTrue(expected.equals(actual));
    }
}
public <T> T parse(String input, Class<T> cls) { return readFlow(input, cls, type(cls)); }
@Test void inputs() { Flow flow = this.parse("flows/valids/inputs.yaml"); assertThat(flow.getInputs().size(), is(27)); assertThat(flow.getInputs().stream().filter(Input::getRequired).count(), is(9L)); assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count(), is(18L)); assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count(), is(1L)); assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput && ((StringInput)r).getValidator() != null).count(), is(1L)); }
@Override public void handle(HttpServletRequest request, HttpServletResponse response, String resourceName, BlockException ex) throws Exception { // Return 429 (Too Many Requests) by default. response.setStatus(429); PrintWriter out = response.getWriter(); out.print("Blocked by Sentinel (flow limiting)"); out.flush(); out.close(); }
@Test public void handle_writeBlockPage() throws Exception { DefaultBlockExceptionHandler h = new DefaultBlockExceptionHandler(); MockHttpServletRequest req = new MockHttpServletRequest("GET", "/a/b/c"); req.setQueryString("a=1&b=2"); MockHttpServletResponse resp = new MockHttpServletResponse(); String resourceName = "/a/b/c"; BlockException ex = new FlowException("msg"); h.handle(req, resp, resourceName, ex); assertEquals(429, resp.getStatus()); }
public static Optional<KsqlAuthorizationValidator> create( final KsqlConfig ksqlConfig, final ServiceContext serviceContext, final Optional<KsqlAuthorizationProvider> externalAuthorizationProvider ) { final Optional<KsqlAccessValidator> accessValidator = getAccessValidator( ksqlConfig, serviceContext, externalAuthorizationProvider ); return accessValidator.map(v -> new KsqlAuthorizationValidatorImpl(cacheIfEnabled(ksqlConfig, v))); }
@Test public void shouldReturnEmptyValidatorIfKafkaBrokerVersionTooLowAndExceptionWrapped() throws InterruptedException, ExecutionException { // Given: givenSingleNode(); givenAuthorizerClass("a-class"); final KafkaFuture<Set<AclOperation>> authorized = mockAuthorizedOperationsFuture(); final DescribeClusterResult result = mock(DescribeClusterResult.class); when(adminClient.describeCluster(any())).thenReturn(result); when(result.authorizedOperations()).thenReturn(authorized); when(authorized.get()) .thenThrow(new ExecutionException(new UnsupportedVersionException("too old"))); // When: final Optional<KsqlAuthorizationValidator> validator = KsqlAuthorizationValidatorFactory.create( ksqlConfig, serviceContext, Optional.empty() ); // Then assertThat(validator, is(Optional.empty())); }
public Ce.Task formatActivity(DbSession dbSession, CeActivityDto dto, @Nullable String scannerContext) { return formatActivity(dto, DtoCache.forActivityDtos(dbClient, dbSession, singletonList(dto)), scannerContext); }
@Test public void formatActivity_filterWarnings_andSetWarningsAndCount() { TestActivityDto dto = newActivity("UUID", "COMPONENT_UUID", CeActivityDto.Status.FAILED, null); CeTaskMessageDto warning1 = createCeTaskMessageDto(1998, MessageType.GENERIC); CeTaskMessageDto warning2 = createCeTaskMessageDto(1999, MessageType.GENERIC); List<CeTaskMessageDto> ceTaskMessageDtos = new ArrayList<>(dto.getCeTaskMessageDtos()); ceTaskMessageDtos.add(warning1); ceTaskMessageDtos.add(warning2); dto.setCeTaskMessageDtos(ceTaskMessageDtos); Ce.Task wsTask = underTest.formatActivity(db.getSession(), dto, null); assertThat(wsTask.getWarningCount()).isEqualTo(2); assertThat(wsTask.getWarningsList()).hasSameElementsAs(getMessagesText(List.of(warning1, warning2))); }
@Override public Set<ProfileDescription> find(final Visitor visitor) throws BackgroundException { if(log.isInfoEnabled()) { log.info(String.format("Fetch profiles from %s", session.getHost())); } final ProfileFilter filter = new ProfileFilter(); final AttributedList<Path> list = session.getFeature(ListService.class).list(new DelegatingHomeFeature( new DefaultPathHomeFeature(session.getHost())).find(), new DisabledListProgressListener()); return list.filter(filter).toStream().map(file -> visitor.visit(new RemoteProfileDescription(protocols, file, new LazyInitializer<Local>() { @Override protected Local initialize() throws ConcurrentException { try { final Local local = temp.create("profiles", file); final TransferPathFilter filter = comparison .withFinder(new Find() { @Override public boolean find(final Path file, final ListProgressListener listener) { return true; } }) .withAttributes(new AttributesFinder() { @Override public PathAttributes find(final Path file, final ListProgressListener listener) { return file.attributes(); } }); if(filter.accept(file, local, new TransferStatus().exists(true))) { final Read read = session.getFeature(Read.class); if(log.isInfoEnabled()) { log.info(String.format("Download profile %s", file)); } // Read latest version try (InputStream in = read.read(file.withAttributes(new PathAttributes(file.attributes()) // Read latest version .withVersionId(null)), new TransferStatus().withLength(TransferStatus.UNKNOWN_LENGTH), new DisabledConnectionCallback()); OutputStream out = local.getOutputStream(false)) { IOUtils.copy(in, out); } } return local; } catch(BackgroundException | IOException e) { throw new ConcurrentException(e); } } } ))).collect(Collectors.toSet()); }
@Test public void find() throws Exception { final ProtocolFactory protocols = new ProtocolFactory(new HashSet<>(Arrays.asList(new TestProtocol() { @Override public String getIdentifier() { return "s3"; } @Override public Type getType() { return Type.s3; } @Override public boolean isEnabled() { return false; } }, new TestProtocol() { @Override public String getIdentifier() { return "davs"; } @Override public Type getType() { return Type.dav; } @Override public boolean isEnabled() { return false; } }))); final TestProtocol protocol = new TestProtocol() { @Override public String getIdentifier() { return "davs"; } @Override public Scheme getScheme() { return Scheme.https; } @Override public Type getType() { return Type.dav; } @Override public boolean isEnabled() { return false; } }; final Host host = new HostParser(protocols, protocol).get("https://svn.cyberduck.io/trunk/profiles"); final NullSession session = new NullSession(host); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); final RemoteProfilesFinder finder = new RemoteProfilesFinder(session); final Set<ProfileDescription> stream = finder.find(); assertTrue(stream.isEmpty()); session.close(); }
@Override public Connection getConnection(Properties properties, String connectionString, SSLContextSettings sslContextSettings) { try { RestClientConfigurationBuilder builder = new RestClientConfigurationBuilder().withProperties(properties); if (connectionString == null || connectionString.isEmpty() || "-".equals(connectionString)) { builder.addServer().host("localhost").port(11222); } else { Matcher matcher = HOST_PORT.matcher(connectionString); if (matcher.matches()) { String host = matcher.group(1); String port = matcher.group(2); builder.addServer().host(host).port(port != null ? Integer.parseInt(port) : 11222); } else { URL url = new URL(connectionString); if (!url.getProtocol().equals("http") && !url.getProtocol().equals("https")) { throw new IllegalArgumentException(); } int port = url.getPort(); builder.addServer().host(url.getHost()).port(port > 0 ? port : url.getDefaultPort()); String userInfo = url.getUserInfo(); if (userInfo != null) { String[] split = userInfo.split(":"); builder.security().authentication().username(URLDecoder.decode(split[0], StandardCharsets.UTF_8)); if (split.length == 2) { builder.security().authentication().password(URLDecoder.decode(split[1], StandardCharsets.UTF_8)); } } if (url.getProtocol().equals("https")) { SslConfigurationBuilder ssl = builder.security().ssl().enable(); if (sslContextSettings != null) { ssl.sslContext(sslContextSettings.getSslContext()) .trustManagers(sslContextSettings.getTrustManagers()) .hostnameVerifier(sslContextSettings.getHostnameVerifier()); } } } } builder.header("User-Agent", Version.getBrandName() + " CLI " + Version.getBrandVersion()); return new RestConnection(builder); } catch (Throwable e) { return null; } }
@Test public void testUrlWithSSL() throws NoSuchAlgorithmException { RestConnector connector = new RestConnector(); RestConnection connection = (RestConnection) connector.getConnection(new Properties(),"https://localhost", null); RestClientConfigurationBuilder builder = connection.getBuilder(); builder.security().ssl().sslContext(SSLContext.getDefault()).trustManagers(new TrustManager[]{new ZeroSecurityTrustManager()}); RestClientConfiguration configuration = builder.build(); assertEquals(443, configuration.servers().get(0).port()); assertEquals("localhost", configuration.servers().get(0).host()); assertFalse(configuration.security().authentication().enabled()); assertTrue(configuration.security().ssl().enabled()); }
protected boolean fastpath() { return false; }
@Test(dataProvider = "caches") @CacheSpec(compute = Compute.SYNC, population = Population.EMPTY, maximumSize = Maximum.FULL, weigher = CacheWeigher.DISABLED, expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED, expiry = CacheExpiry.DISABLED, keys = ReferenceType.STRONG, values = ReferenceType.STRONG) public void fastpath(BoundedLocalCache<Int, Int> cache, CacheContext context) { assertThat(cache.skipReadBuffer()).isTrue(); for (int i = 0; i < (context.maximumSize() / 2) - 1; i++) { var oldValue = cache.put(Int.valueOf(i), Int.valueOf(-i)); assertThat(oldValue).isNull(); } assertThat(cache.skipReadBuffer()).isTrue(); var oldValue = cache.put(Int.valueOf(-1), Int.valueOf(-1)); assertThat(oldValue).isNull(); assertThat(cache.skipReadBuffer()).isFalse(); assertThat(cache.get(Int.valueOf(0))).isNotNull(); assertThat(cache.readBuffer.writes()).isEqualTo(1); cache.cleanUp(); assertThat(cache.readBuffer.reads()).isEqualTo(1); }
public OpenAPI read(Class<?> cls) { return read(cls, resolveApplicationPath(), null, false, null, null, new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>()); }
@Test(description = "Responses with array schema") public void testTicket2763() { Reader reader = new Reader(new OpenAPI()); OpenAPI openAPI = reader.read(Ticket2763Resource.class); String yaml = "openapi: 3.0.1\n" + "paths:\n" + " /array:\n" + " get:\n" + " operationId: getArrayResponses\n" + " responses:\n" + " default:\n" + " content:\n" + " application/json:\n" + " schema:\n" + " type: array\n" + " items:\n" + " $ref: https://openebench.bsc.es/monitor/tool/tool.json\n" + " /schema:\n" + " get:\n" + " operationId: getSchemaResponses\n" + " responses:\n" + " default:\n" + " content:\n" + " application/json:\n" + " schema:\n" + " $ref: https://openebench.bsc.es/monitor/tool/tool.json"; SerializationMatchers.assertEqualsToYaml(openAPI, yaml); }
protected void updateHost(MacAddress mac, VlanId vlan, Set<HostLocation> locations, Set<IpAddress> ips) { HostId hid = HostId.hostId(mac, vlan); HostDescription desc = new DefaultHostDescription(mac, vlan, locations, ips, true); providerService.hostDetected(hid, desc, true); }
@Test public void testUpdateHost() throws Exception { provider.updateHost(mac, vlan, locations, auxLocations, ips, innerVlan, outerTpid); assertThat(providerService.hostId, is(hostId)); assertThat(providerService.hostDescription, is(hostDescription)); assertThat(providerService.event, is("hostDetected")); providerService.clear(); }
@VisibleForTesting() void checkDiskUsage() { final Map<Notification.Type, List<String>> notificationTypePerNodeIdentifier = new HashMap<>(); try { ClusterAllocationDiskSettings settings = cluster.getClusterAllocationDiskSettings(); if (settings.ThresholdEnabled()) { final Set<NodeDiskUsageStats> diskUsageStats = cluster.getDiskUsageStats(); for (NodeDiskUsageStats nodeDiskUsageStats : diskUsageStats) { if (!nodeHoldsData(nodeDiskUsageStats)) { LOG.debug("Ignoring non-data ES node <{}/{}> with roles <{}> for disk usage check.", nodeDiskUsageStats.host(), nodeDiskUsageStats.ip(), nodeDiskUsageStats.roles()); continue; } Notification.Type currentNodeNotificationType = null; WatermarkSettings<?> watermarkSettings = settings.watermarkSettings(); if (watermarkSettings instanceof PercentageWatermarkSettings) { currentNodeNotificationType = getDiskUsageNotificationTypeByPercentage((PercentageWatermarkSettings) watermarkSettings, nodeDiskUsageStats); } else if (watermarkSettings instanceof AbsoluteValueWatermarkSettings) { currentNodeNotificationType = getDiskUsageNotificationTypeByAbsoluteValues((AbsoluteValueWatermarkSettings) watermarkSettings, nodeDiskUsageStats); } if (currentNodeNotificationType != null) { String nodeIdentifier = firstNonNull(nodeDiskUsageStats.host(), nodeDiskUsageStats.ip()); notificationTypePerNodeIdentifier.merge(currentNodeNotificationType, Collections.singletonList(nodeIdentifier), (prev, cur) -> ImmutableList.<String>builder() .addAll(prev) .addAll(cur) .build()); } } if (notificationTypePerNodeIdentifier.isEmpty()) { fixAllDiskUsageNotifications(); } else { publishDiskUsageNotifications(notificationTypePerNodeIdentifier); } } } catch (Exception e) { LOG.error("Error while trying to check Elasticsearch disk usage.Details: " + e.getMessage()); } }
@Test public void fixAllDiskUsageNotificationsPercentage() throws Exception { Set<NodeDiskUsageStats> nodeDiskUsageStats = mockNodeDiskUsageStats(); when(cluster.getDiskUsageStats()).thenReturn(nodeDiskUsageStats); when(cluster.getClusterAllocationDiskSettings()).thenReturn(buildThresholdNotTriggeredClusterAllocationDiskSettings(WatermarkSettings.SettingsType.PERCENTAGE)); indexerClusterCheckerThread.checkDiskUsage(); verify(notificationService, never()).publishIfFirst(any()); verify(notificationService, times(1)).fixed(Notification.Type.ES_NODE_DISK_WATERMARK_FLOOD_STAGE); verify(notificationService, times(1)).fixed(Notification.Type.ES_NODE_DISK_WATERMARK_HIGH); verify(notificationService, times(1)).fixed(ES_NODE_DISK_WATERMARK_LOW); }
@Udf public String elt( @UdfParameter(description = "the nth element to extract") final int n, @UdfParameter(description = "the strings of which to extract the nth") final String... args ) { if (args == null) { return null; } if (n < 1 || n > args.length) { return null; } return args[n - 1]; }
@Test public void shouldSelectFirstElementOfOne() { // When: final String el = elt.elt(1, "a"); // Then: assertThat(el, equalTo("a")); }
@Override public File getScannerEngine() { File scannerDir = new File(fs.getHomeDir(), "lib/scanner"); if (!scannerDir.exists()) { throw new NotFoundException(format("Scanner directory not found: %s", scannerDir.getAbsolutePath())); } return listFiles(scannerDir, VISIBLE, directoryFileFilter()) .stream() .filter(file -> file.getName().endsWith(".jar")) .findFirst() .orElseThrow(() -> new NotFoundException(format("Scanner JAR not found in directory: %s", scannerDir.getAbsolutePath()))); }
@Test void getScannerEngine_shouldFail_whenScannerDirNotFound() throws IOException { deleteIfExists(scannerDir); assertThatThrownBy(() -> scannerEngineHandler.getScannerEngine()) .isInstanceOf(NotFoundException.class) .hasMessage(format("Scanner directory not found: %s", scannerDir.toAbsolutePath())); }
@Override public AppResponse process(Flow flow, AppSessionRequest request) { if (appSession.getRegistrationId() == null) { return new NokResponse(); } Map<String, String> result = digidClient.getExistingAccount(appSession.getRegistrationId(), appSession.getLanguage()); if (result.get(lowerUnderscore(STATUS)).equals("OK") && result.get(lowerUnderscore(ACCOUNT_ID)) != null) { appSession.setAccountId(Long.valueOf(result.get(lowerUnderscore(ACCOUNT_ID)))); digidClient.remoteLog("54", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); return new OkResponse(); } else if (result.get(lowerUnderscore(STATUS)).equals("PENDING")) { // switch state to require replace action appSession.setState(State.EXISTING_ACCOUNT_FOUND.name()); return new StatusResponse("PENDING"); } else { return new NokResponse(); } }
@Test void processOKTest(){ when(digidClientMock.getExistingAccount(1337L, "NL")).thenReturn(Map.of( lowerUnderscore(STATUS), "OK", lowerUnderscore(ACCOUNT_ID), "1" )); AppResponse appResponse = checkExistingAccount.process(flowMock, null); assertEquals(1, checkExistingAccount.getAppSession().getAccountId()); assertTrue(appResponse instanceof OkResponse); assertEquals("OK", ((OkResponse) appResponse).getStatus()); verify(digidClientMock, times(1)).remoteLog("54", Map.of(lowerUnderscore(ACCOUNT_ID), 1L)); }
@Override public void consume(Update update) { super.consume(update); }
@Test void canProcessChannelPosts() { Update update = mock(Update.class); Message message = mock(Message.class); when(message.getChatId()).thenReturn(1L); when(update.getChannelPost()).thenReturn(message); when(update.hasChannelPost()).thenReturn(true); bot.consume(update); String expected = "test channel post"; verify(silent, times(1)).send(expected, 1); }
@Override public Response request(Request request, long timeouts) throws NacosException { Payload grpcRequest = GrpcUtils.convert(request); ListenableFuture<Payload> requestFuture = grpcFutureServiceStub.request(grpcRequest); Payload grpcResponse; try { if (timeouts <= 0) { grpcResponse = requestFuture.get(); } else { grpcResponse = requestFuture.get(timeouts, TimeUnit.MILLISECONDS); } } catch (Exception e) { throw new NacosException(NacosException.SERVER_ERROR, e); } return (Response) GrpcUtils.parse(grpcResponse); }
@Test void testRequestSuccessAsync() throws NacosException { Response response = connection.request(new HealthCheckRequest(), 100); assertTrue(response instanceof HealthCheckResponse); }
public MetadataReportBuilder retryTimes(Integer retryTimes) { this.retryTimes = retryTimes; return getThis(); }
@Test void retryTimes() { MetadataReportBuilder builder = new MetadataReportBuilder(); builder.retryTimes(1); Assertions.assertEquals(1, builder.build().getRetryTimes()); }
@Nullable public static Extractor getExtractorInstance( @NonNull Context context, @NonNull File file, @NonNull String outputPath, @NonNull Extractor.OnUpdate listener, @NonNull UpdatePosition updatePosition) { Extractor extractor; String type = getExtension(file.getPath()); if (isZip(type)) { extractor = new ZipExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (BuildConfig.FLAVOR.equals("play") && isRar(type)) { extractor = new RarExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isTar(type)) { extractor = new TarExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isGzippedTar(type)) { extractor = new TarGzExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isBzippedTar(type)) { extractor = new TarBzip2Extractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isXzippedTar(type)) { extractor = new TarXzExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isLzippedTar(type)) { extractor = new TarLzmaExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (is7zip(type)) { extractor = new SevenZipExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isLzma(type)) { extractor = new LzmaExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isXz(type)) { extractor = new XzExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isGzip(type)) { extractor = new GzipExtractor(context, file.getPath(), outputPath, listener, updatePosition); } else if (isBzip2(type)) { extractor = new Bzip2Extractor(context, file.getPath(), outputPath, listener, updatePosition); } else { if (BuildConfig.DEBUG) { throw new IllegalArgumentException("The compressed file has no way of opening it: " + file); } LOG.error("The compressed file has no way of opening it: " + file); extractor = null; } return extractor; }
@Test
public void getExtractorInstance() {
    UpdatePosition updatePosition = ServiceWatcherUtil.UPDATE_POSITION;
    File file = new File("/test/test.zip"); // .zip used by ZipExtractor
    Extractor result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), ZipExtractor.class);
    file = new File("/test/test.jar"); // .jar used by ZipExtractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), ZipExtractor.class);
    file = new File("/test/test.apk"); // .apk used by ZipExtractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), ZipExtractor.class);
    file = new File("/test/test.tar"); // .tar used by TarExtractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarExtractor.class);
    file = new File("/test/test.tar.gz"); // .tar.gz used by TarGzExtractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarGzExtractor.class);
    file = new File("/test/test.tgz"); // .tgz used by TarGzExtractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarGzExtractor.class);
    if (BuildConfig.FLAVOR == "play") {
        file = new File("/test/test.rar"); // .rar used by RarExtractor
        result = CompressedHelper.getExtractorInstance(
            context, file, "/test2", emptyUpdateListener, updatePosition);
        assertEquals(result.getClass(), RarExtractor.class);
    }
    file = new File("/test/test.tar.bz2"); // .tar.bz2 used by TarBzip2Extractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarBzip2Extractor.class);
    file = new File("/test/test.tbz"); // .tbz used by TarBzip2Extractor
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarBzip2Extractor.class);
    file = new File("/test/test.7z");
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), SevenZipExtractor.class);
    file = new File("/test/test.tar.xz");
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarXzExtractor.class);
    file = new File("/test/test.tar.lzma");
    result = CompressedHelper.getExtractorInstance(
        context, file, "/test2", emptyUpdateListener, updatePosition);
    assertEquals(result.getClass(), TarLzmaExtractor.class);
}
@Nonnull public static String removeBracketsFromIpv6Address(@Nonnull final String address) { final String result; if (address.startsWith("[") && address.endsWith("]")) { result = address.substring(1, address.length()-1); try { Ipv6.parse(result); // The remainder is a valid IPv6 address. Return the original value. return result; } catch (IllegalArgumentException e) { // The remainder isn't a valid IPv6 address. Return the original value. return address; } } // Not a bracket-enclosed string. Return the original input. return address; }
@Test public void stripBracketsNonIPNoBrackets() throws Exception { // Setup test fixture. final String input = "Foo Bar"; // Execute system under test. final String result = AuthCheckFilter.removeBracketsFromIpv6Address(input); // Verify result. assertEquals(input, result); // Should only strip brackets from IPv6, nothing else. }
public static boolean toBoolean(final Literal literal, final String propertyName) { final String value = literal.getValue().toString(); final boolean isTrue = value.equalsIgnoreCase("true"); final boolean isFalse = value.equalsIgnoreCase("false"); if (!isTrue && !isFalse) { throw new KsqlException("Property '" + propertyName + "' is not a boolean value"); } return isTrue; }
@Test public void shouldThrowConvertingOtherLiteralTypesToBoolean() { // When: final Exception e = assertThrows( KsqlException.class, () -> LiteralUtil.toBoolean(new LongLiteral(10), "bob") ); // Then: assertThat(e.getMessage(), containsString("Property 'bob' is not a boolean value")); }
public long asHz() { return frequency; }
@Test public void testasHz() { Frequency frequency = Frequency.ofKHz(1); assertThat(frequency.asHz(), is(1000L)); }
@Override public Num calculate(BarSeries series, Position position) { Num profitLossRatio = profitLossRatioCriterion.calculate(series, position); Num numberOfPositions = numberOfPositionsCriterion.calculate(series, position); Num numberOfWinningPositions = numberOfWinningPositionsCriterion.calculate(series, position); return calculate(series, profitLossRatio, numberOfWinningPositions, numberOfPositions); }
@Test public void calculateWithMixedPositions() { MockBarSeries series = new MockBarSeries(numFunction, 100, 110, 80, 130, 150, 160); TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series), Trade.sellAt(2, series), Trade.buyAt(3, series), Trade.sellAt(5, series)); AnalysisCriterion avgLoss = getCriterion(); assertNumEquals(0.25, avgLoss.calculate(series, tradingRecord)); }
@ScalarOperator(LESS_THAN_OR_EQUAL) @SqlType(StandardTypes.BOOLEAN) public static boolean lessThanOrEqual(@SqlType(StandardTypes.SMALLINT) long left, @SqlType(StandardTypes.SMALLINT) long right) { return left <= right; }
@Test public void testLessThanOrEqual() { assertFunction("SMALLINT'37' <= SMALLINT'37'", BOOLEAN, true); assertFunction("SMALLINT'37' <= SMALLINT'17'", BOOLEAN, false); assertFunction("SMALLINT'17' <= SMALLINT'37'", BOOLEAN, true); assertFunction("SMALLINT'17' <= SMALLINT'17'", BOOLEAN, true); }
@Deprecated public static String getJwt(JwtClaims claims) throws JoseException { String jwt; RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey( jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName()); // A JWT is a JWS and/or a JWE with JSON claims as the payload. // In this example it is a JWS nested inside a JWE // So we first create a JsonWebSignature object. JsonWebSignature jws = new JsonWebSignature(); // The payload of the JWS is JSON content of the JWT Claims jws.setPayload(claims.toJson()); // The JWT is signed using the sender's private key jws.setKey(privateKey); // Get provider from security config file, it should be two digit // And the provider id will set as prefix for keyid in the token header, for example: 05100 // if there is no provider id, we use "00" for the default value String provider_id = ""; if (jwtConfig.getProviderId() != null) { provider_id = jwtConfig.getProviderId(); if (provider_id.length() == 1) { provider_id = "0" + provider_id; } else if (provider_id.length() > 2) { logger.error("provider_id defined in the security.yml file is invalid; the length should be 2"); provider_id = provider_id.substring(0, 2); } } jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid()); // Set the signature algorithm on the JWT/JWS that will integrity protect the claims jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256); // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS // representation, which is a string consisting of three dot ('.') separated // base64url-encoded parts in the form Header.Payload.Signature jwt = jws.getCompactSerialization(); return jwt; }
@Test public void longlivedLightPortalController() throws Exception { JwtClaims claims = ClaimsUtil.getTestClaims("[email protected]", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e73", Arrays.asList("portal.r", "portal.w"), "user CtlPltAdmin CtlPltRead CtlPltWrite"); claims.setExpirationTimeMinutesInTheFuture(5256000); String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA)); System.out.println("***Long lived token for portal controller ***: " + jwt); }
@VisibleForTesting
public NotifyTemplateDO validateNotifyTemplate(String templateCode) {
    // Get the in-app notification template. For efficiency, read it from the cache
    NotifyTemplateDO template = notifyTemplateService.getNotifyTemplateByCodeFromCache(templateCode);
    // The template does not exist
    if (template == null) {
        throw exception(NOTICE_NOT_FOUND);
    }
    return template;
}
@Test
public void testCheckMailTemplateValid_notExists() {
    // prepare parameters
    String templateCode = randomString();
    // mock methods
    // call and assert the expected exception
    assertServiceException(() -> notifySendService.validateNotifyTemplate(templateCode), NOTICE_NOT_FOUND);
}
@Override public void awaitTerminated() throws InterruptedException { doAction(Executable::awaitTerminated); }
@Test public void shouldJoinAll() throws Exception { // When: multiExecutable.awaitTerminated(); // Then: final InOrder inOrder = Mockito.inOrder(executable1, executable2); inOrder.verify(executable1).awaitTerminated(); inOrder.verify(executable2).awaitTerminated(); inOrder.verifyNoMoreInteractions(); }