focal_method / test_case pairs (focal_method: 13 to 60.9k characters; test_case: 25 to 109k characters)
boolean isPostComment(Comment comment) { return Ref.groupKindEquals(comment.getSpec().getSubjectRef(), POST_GVK); }
@Test void isPostCommentTest() { var comment = createComment(); comment.getSpec() .setSubjectRef(Ref.of("fake-post", GroupVersionKind.fromExtension(Post.class))); assertThat(reasonPublisher.isPostComment(comment)).isTrue(); comment.getSpec() .setSubjectRef(Ref.of("fake-post", GroupVersionKind.fromExtension(SinglePage.class))); assertThat(reasonPublisher.isPostComment(comment)).isFalse(); }
/** * Parses the given text to transform it to the desired target type. * @param text The LLM output in string format. * @return The parsed output in the desired target type. */ @Override public T convert(@NonNull String text) { try { // Remove leading and trailing whitespace text = text.trim(); // Check for and remove triple backticks and "json" identifier if (text.startsWith("```") && text.endsWith("```")) { // Remove the first line if it contains "```json" String[] lines = text.split("\n", 2); if (lines[0].trim().equalsIgnoreCase("```json")) { text = lines.length > 1 ? lines[1] : ""; } else { text = text.substring(3); // Remove leading ``` } // Remove trailing ``` text = text.substring(0, text.length() - 3); // Trim again to remove any potential whitespace text = text.trim(); } return (T) this.objectMapper.readValue(text, this.typeRef); } catch (JsonProcessingException e) { logger.error("Could not parse the given text to the desired target type: " + text + " into " + this.typeRef); throw new RuntimeException(e); } }
@Test public void convertTypeReference() { var converter = new BeanOutputConverter<>(new ParameterizedTypeReference<TestClass>() { }); var testClass = converter.convert("{ \"someString\": \"some value\" }"); assertThat(testClass.getSomeString()).isEqualTo("some value"); }
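The fence-stripping step inside convert is self-contained enough to lift out and exercise on its own. A minimal JDK-only sketch of the same behavior (the helper name stripFences is hypothetical, not part of the converter's API):

// Hypothetical standalone rewrite of convert()'s fence-stripping step; plain JDK only.
static String stripFences(String text) {
    text = text.trim();
    if (text.startsWith("```") && text.endsWith("```")) {
        String[] lines = text.split("\n", 2);
        text = lines[0].trim().equalsIgnoreCase("```json")
                ? (lines.length > 1 ? lines[1] : "")   // drop a leading "```json" marker line
                : text.substring(3);                   // otherwise drop just the opening backticks
        text = text.substring(0, text.length() - 3).trim(); // drop the closing backticks
    }
    return text;
}
// stripFences("```json\n{ \"someString\": \"some value\" }\n```") yields the bare JSON object,
// which is why a fenced LLM reply parses the same as the unfenced JSON used in the test.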
public Value parse(String json) { return this.delegate.parse(json); }
@Test public void testExponentialInteger1() throws Exception { final JsonParser parser = new JsonParser(); final Value msgpackValue = parser.parse("12345e3"); assertTrue(msgpackValue.getValueType().isNumberType()); // TODO: Consider whether this needs to be an integer. // See: https://github.com/embulk/embulk/issues/775 assertTrue(msgpackValue.getValueType().isFloatType()); assertFalse(msgpackValue.getValueType().isIntegerType()); assertFalse(msgpackValue.getValueType().isStringType()); assertEquals(12345000.0, msgpackValue.asFloatValue().toDouble(), 0.000000001); // Not sure this |toString| should be tested... assertEquals("1.2345E7", msgpackValue.asFloatValue().toString()); }
@Override public Set<ConstraintCheckResult> checkConstraints(Collection<Constraint> requestedConstraints) { final ImmutableSet.Builder<ConstraintCheckResult> fulfilledConstraints = ImmutableSet.builder(); for (Constraint constraint : requestedConstraints) { if (constraint instanceof GraylogVersionConstraint) { final GraylogVersionConstraint versionConstraint = (GraylogVersionConstraint) constraint; final Requirement requiredVersion = versionConstraint.version(); final ConstraintCheckResult constraintCheckResult = ConstraintCheckResult.create(versionConstraint, requiredVersion.isSatisfiedBy(graylogVersion.withClearedSuffixAndBuild())); fulfilledConstraints.add(constraintCheckResult); } } return fulfilledConstraints.build(); }
@Test public void checkConstraintsFails() { final GraylogVersionConstraintChecker constraintChecker = new GraylogVersionConstraintChecker("1.0.0"); final GraylogVersionConstraint graylogVersionConstraint = GraylogVersionConstraint.builder() .version("^2.0.0") .build(); final PluginVersionConstraint pluginVersionConstraint = PluginVersionConstraint.builder() .pluginId("unique-id") .version("^1.0.0") .build(); final ImmutableSet<Constraint> requiredConstraints = ImmutableSet.of(graylogVersionConstraint, pluginVersionConstraint); final Set<ConstraintCheckResult> result = constraintChecker.checkConstraints(requiredConstraints); assertThat(result.stream().allMatch(c -> !c.fulfilled())).isTrue(); }
public static ClassLoader getClassLoader(Class<?> clazz) { ClassLoader cl = null; if (!clazz.getName().startsWith("org.apache.dubbo")) { cl = clazz.getClassLoader(); } if (cl == null) { try { cl = Thread.currentThread().getContextClassLoader(); } catch (Exception ignored) { // Cannot access thread context ClassLoader - falling back to system class loader... } if (cl == null) { // No thread context class loader -> use class loader of this class. cl = clazz.getClassLoader(); if (cl == null) { // getClassLoader() returning null indicates the bootstrap ClassLoader try { cl = ClassLoader.getSystemClassLoader(); } catch (Exception ignored) { // Cannot access system ClassLoader - oh well, maybe the caller can live with null... } } } } return cl; }
@Test void testGetClassLoader2() { assertThat(ClassUtils.getClassLoader(), sameInstance(ClassUtils.class.getClassLoader())); }
@VisibleForTesting public void validateDictTypeExists(String type) { DictTypeDO dictType = dictTypeService.getDictType(type); if (dictType == null) { throw exception(DICT_TYPE_NOT_EXISTS); } if (!CommonStatusEnum.ENABLE.getStatus().equals(dictType.getStatus())) { throw exception(DICT_TYPE_NOT_ENABLE); } }
@Test public void testValidateDictTypeExists_notEnable() { // mock the method: the dict type is disabled String dictType = randomString(); when(dictTypeService.getDictType(eq(dictType))).thenReturn( randomPojo(DictTypeDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus()))); // invoke and assert the exception assertServiceException(() -> dictDataService.validateDictTypeExists(dictType), DICT_TYPE_NOT_ENABLE); }
public String getMeta(String key) { return metadata.get(key); }
@Test public void testGetMeta() { ZCert cert = new ZCert(); cert.setMeta("version", "1"); ZMetadata meta = cert.getMetadata(); String version = meta.get("version"); assertThat(version, is("1")); meta.set("version", "2"); version = cert.getMeta("version"); assertThat(version, is("2")); }
@VisibleForTesting void validateNameUnique(List<MemberLevelDO> list, Long id, String name) { for (MemberLevelDO levelDO : list) { if (ObjUtil.notEqual(levelDO.getName(), name)) { continue; } if (id == null || !id.equals(levelDO.getId())) { throw exception(LEVEL_NAME_EXISTS, levelDO.getName()); } } }
@Test public void testCreateLevel_nameUnique() { // prepare parameters String name = randomString(); // mock data memberlevelMapper.insert(randomLevelDO(o -> o.setName(name))); // invoke and verify the exception List<MemberLevelDO> list = memberlevelMapper.selectList(); assertServiceException(() -> levelService.validateNameUnique(list, null, name), LEVEL_NAME_EXISTS, name); }
public String compile(final String xls, final String template, int startRow, int startCol) { return compile( xls, template, InputType.XLS, startRow, startCol ); }
@Test public void testIntegration() throws Exception { final String drl = converter.compile("/data/IntegrationExampleTestForTemplates.drl.xls", "/templates/test_integration.drl", 18, 3); KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL); assertThat(kbuilder.hasErrors()).isFalse(); InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); KieSession kSession = kbase.newKieSession(); //ASSERT AND FIRE kSession.insert(new Cheese("stilton", 42)); kSession.insert(new Person("michael", "stilton", 42)); List<String> list = new ArrayList<>(); kSession.setGlobal("list", list); kSession.fireAllRules(); assertThat(list).hasSize(1); }
@PublicAPI(usage = ACCESS) public Optional<String> getFileName() { return fileName; }
@Test public void source_file_name() { Source source = new Source(uriOf(Object.class), Optional.of("SomeClass.java"), false); assertThat(source.getFileName()).as("source file name").contains("SomeClass.java"); source = new Source(uriOf(Object.class), Optional.empty(), false); assertThat(source.getFileName()).as("source file name").isEmpty(); }
@Override public StatusOutputStream<Void> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { final EnumSet<OpenMode> flags; if(status.isAppend()) { if(status.isExists()) { // No append flag. Otherwise the offset field of SSH_FXP_WRITE requests is ignored. flags = EnumSet.of(OpenMode.WRITE); } else { // Allocate offset flags = EnumSet.of(OpenMode.CREAT, OpenMode.WRITE); } } else { // A new file is created; if the file already exists, it is opened and truncated to preserve ownership of file. if(status.isExists()) { if(file.isSymbolicLink()) { // Workaround for #7327 session.sftp().remove(file.getAbsolute()); flags = EnumSet.of(OpenMode.CREAT, OpenMode.TRUNC, OpenMode.WRITE); } else { flags = EnumSet.of(OpenMode.TRUNC, OpenMode.WRITE); } } else { flags = EnumSet.of(OpenMode.CREAT, OpenMode.TRUNC, OpenMode.WRITE); } } final RemoteFile handle = session.sftp().open(file.getAbsolute(), flags); final int maxUnconfirmedWrites = this.getMaxUnconfirmedWrites(status); if(log.isInfoEnabled()) { log.info(String.format("Using %d unconfirmed writes", maxUnconfirmedWrites)); } if(log.isInfoEnabled()) { log.info(String.format("Skipping %d bytes", status.getOffset())); } // Open stream at offset return new VoidStatusOutputStream(new ChunkedOutputStream(handle.new RemoteFileOutputStream(status.getOffset(), maxUnconfirmedWrites) { private final AtomicBoolean close = new AtomicBoolean(); @Override public void close() throws IOException { if(close.get()) { log.warn(String.format("Skip double close of stream %s", this)); return; } try { super.close(); } finally { handle.close(); close.set(true); } } }, preferences.getInteger("sftp.write.chunksize"))); } catch(IOException e) { throw new SFTPExceptionMappingService().map("Upload {0} failed", e, file); } }
@Test public void testWrite() throws Exception { final Path folder = new SFTPDirectoryFeature(session).mkdir(new Path(new SFTPHomeDirectoryService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final long folderModification = new SFTPAttributesFinderFeature(session).find(folder).getModificationDate(); final Path test = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); // Only seconds in modification date Thread.sleep(1000L); final TransferStatus status = new TransferStatus(); final int length = 1048576; final byte[] content = RandomUtils.nextBytes(length); status.setLength(content.length); status.setExists(false); final OutputStream out = new SFTPWriteFeature(session).write(test, status, new DisabledConnectionCallback()); assertNotNull(out); out.write(content); out.close(); assertNotEquals(folderModification, new SFTPAttributesFinderFeature(session).find(folder).getModificationDate()); assertTrue(new SFTPFindFeature(session).find(test)); assertEquals(content.length, new SFTPListService(session).list(test.getParent(), new DisabledListProgressListener()).get(test).attributes().getSize()); { final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length); final InputStream in = new SFTPReadFeature(session).read(test, new TransferStatus().withLength(content.length), new DisabledConnectionCallback()); new StreamCopier(status, status).transfer(in, buffer); assertArrayEquals(content, buffer.toByteArray()); } { final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length - 1); final InputStream in = new SFTPReadFeature(session).read(test, new TransferStatus().append(true).withOffset(1L), new DisabledConnectionCallback()); new StreamCopier(status, status).transfer(in, buffer); final byte[] reference = new byte[content.length - 1]; System.arraycopy(content, 1, reference, 0, content.length - 1); assertArrayEquals(reference, buffer.toByteArray()); } new SFTPDeleteFeature(session).delete(Arrays.asList(test, folder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static Permission getPermission(String name, String serviceName, String... actions) { PermissionFactory permissionFactory = PERMISSION_FACTORY_MAP.get(serviceName); if (permissionFactory == null) { throw new IllegalArgumentException("No permissions found for service: " + serviceName); } return permissionFactory.create(name, actions); }
@Test public void getPermission_Queue() { Permission permission = ActionConstants.getPermission("foo", QueueService.SERVICE_NAME); assertNotNull(permission); assertTrue(permission instanceof QueuePermission); }
@VisibleForTesting static void validateFips(final KsqlConfig config, final KsqlRestConfig restConfig) { if (config.getBoolean(ConfluentConfigs.ENABLE_FIPS_CONFIG)) { final FipsValidator fipsValidator = ConfluentConfigs.buildFipsValidator(); // validate cipher suites and TLS version validateCipherSuites(fipsValidator, restConfig); // validate broker validateBroker(fipsValidator, config); // validate ssl endpoint algorithm validateSslEndpointAlgo(fipsValidator, restConfig); // validate schema registry url validateSrUrl(fipsValidator, restConfig); // validate all listeners validateListeners(fipsValidator, restConfig); log.info("FIPS mode enabled for ksqlDB!"); } }
@Test public void shouldFailOnInvalidBrokerSecurityProtocol() { // Given: final KsqlConfig config = configWith(ImmutableMap.of( ConfluentConfigs.ENABLE_FIPS_CONFIG, true, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_PLAINTEXT.name )); final KsqlRestConfig restConfig = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(KsqlRestConfig.SSL_CIPHER_SUITES_CONFIG, Collections.singletonList("TLS_RSA_WITH_AES_256_CCM")) .build() ); // When: final Exception e = assertThrows( SecurityException.class, () -> KsqlServerMain.validateFips(config, restConfig) ); // Then: assertThat(e.getMessage(), containsString( "FIPS 140-2 Configuration Error, invalid broker protocols: " + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG + ":SASL_PLAINTEXT")); }
public static double tileYToLatitude(long tileY, byte zoomLevel) { return pixelYToLatitude(tileY * DUMMY_TILE_SIZE, getMapSize(zoomLevel, DUMMY_TILE_SIZE)); }
@Test public void tileYToLatitudeTest() { for (int tileSize : TILE_SIZES) { for (byte zoomLevel = ZOOM_LEVEL_MIN; zoomLevel <= ZOOM_LEVEL_MAX; ++zoomLevel) { double latitude = MercatorProjection.tileYToLatitude(0, zoomLevel); Assert.assertEquals(MercatorProjection.LATITUDE_MAX, latitude, 0); latitude = MercatorProjection.tileYToLatitudeWithScaleFactor(0, MercatorProjection.zoomLevelToScaleFactor(zoomLevel)); Assert.assertEquals(MercatorProjection.LATITUDE_MAX, latitude, 0); long tileY = MercatorProjection.getMapSize(zoomLevel, tileSize) / tileSize; latitude = MercatorProjection.tileYToLatitude(tileY, zoomLevel); Assert.assertEquals(MercatorProjection.LATITUDE_MIN, latitude, 0); tileY = MercatorProjection.getMapSizeWithScaleFactor(MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize) / tileSize; latitude = MercatorProjection.tileYToLatitudeWithScaleFactor(tileY, MercatorProjection.zoomLevelToScaleFactor(zoomLevel)); Assert.assertEquals(MercatorProjection.LATITUDE_MIN, latitude, 0); } } }
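For reference, the inverse Web-Mercator mapping these assertions rest on is the standard formula (stated here from common knowledge, not quoted from the source): with $n = \text{mapSize}/\text{tileSize}$ tile rows,

$$\varphi(y) = \frac{180}{\pi}\,\arctan\!\left(\sinh\!\left(\pi\left(1 - \frac{2y}{n}\right)\right)\right)$$

so $y = 0$ gives $\varphi \approx 85.0511°$ (LATITUDE_MAX) and $y = n$ gives $\varphi \approx -85.0511°$ (LATITUDE_MIN), which is exactly what the test checks at both ends of every zoom level.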
@Override public void emit(OutboundPacket packet) { store.emit(networkId(), packet); }
@Test public void emitTest() { OutboundPacket packet = new DefaultOutboundPacket(VDID1, DefaultTrafficTreatment.emptyTreatment(), ByteBuffer.allocate(5)); packetManager1.emit(packet); assertEquals("Packet not emitted correctly", packet, emittedPacket); }
public static synchronized int getAvailablePort() { int randomPort = getRandomPort(); return getAvailablePort(randomPort); }
@Test void testGetAvailablePort() { assertThat(NetUtils.getAvailablePort(), greaterThan(0)); assertThat(NetUtils.getAvailablePort(12345), greaterThanOrEqualTo(12345)); assertThat(NetUtils.getAvailablePort(-1), greaterThanOrEqualTo(0)); }
@Override public boolean tryClaim(Long i) { checkArgument( lastAttemptedOffset == null || i > lastAttemptedOffset, "Trying to claim offset %s while last attempted was %s", i, lastAttemptedOffset); checkArgument( i >= range.getFrom(), "Trying to claim offset %s before start of the range %s", i, range); lastAttemptedOffset = i; // No respective checkArgument for i < range.to() - it's ok to try claiming offsets beyond it. if (i >= range.getTo()) { return false; } lastClaimedOffset = i; return true; }
@Test public void testClaimBeforeStartOfRange() throws Exception { expected.expectMessage("Trying to claim offset 90 before start of the range [100, 200)"); OffsetRangeTracker tracker = new OffsetRangeTracker(new OffsetRange(100, 200)); tracker.tryClaim(90L); }
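The tracker's contract reads most clearly as a call sequence. A hedged usage sketch against the API shown above (Beam-style OffsetRangeTracker/OffsetRange):

// Claims must be strictly increasing and at or after range.from(); claims at or past
// range.to() return false without claiming anything.
OffsetRangeTracker tracker = new OffsetRangeTracker(new OffsetRange(100, 200));
tracker.tryClaim(100L); // true: first offset inside [100, 200)
tracker.tryClaim(150L); // true: strictly greater than the last attempted offset
tracker.tryClaim(200L); // false: at range.to(), so the offset is attempted but not claimed
// tracker.tryClaim(150L) would now throw - 150 is not greater than the last attempt (200);
// tryClaim(90L) on a fresh tracker throws the "before start of the range" error the test expects.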
public CompletableFuture<SendPushNotificationResult> sendRegistrationChallengeNotification(final String deviceToken, final PushNotification.TokenType tokenType, final String challengeToken) { return sendNotification(new PushNotification(deviceToken, tokenType, PushNotification.NotificationType.CHALLENGE, challengeToken, null, null, true)) .thenApply(maybeResponse -> maybeResponse.orElseThrow(() -> new AssertionError("Responses must be present for urgent notifications"))); }
@Test void sendRegistrationChallengeNotification() { final String deviceToken = "token"; final String challengeToken = "challenge"; when(apnSender.sendNotification(any())) .thenReturn(CompletableFuture.completedFuture(new SendPushNotificationResult(true, Optional.empty(), false, Optional.empty()))); pushNotificationManager.sendRegistrationChallengeNotification(deviceToken, PushNotification.TokenType.APN_VOIP, challengeToken); verify(apnSender).sendNotification(new PushNotification(deviceToken, PushNotification.TokenType.APN_VOIP, PushNotification.NotificationType.CHALLENGE, challengeToken, null, null, true)); }
@Override public String getString() { return this.string; }
@Test public void testGetString() { ValueString vs = new ValueString( "Boden" ); assertEquals( "Boden", vs.getString() ); assertEquals( 0.0, vs.getNumber(), 0.0D ); assertNull( vs.getDate() ); // will fail parsing assertEquals( false, vs.getBoolean() ); assertEquals( 0, vs.getInteger() ); try { vs.getBigNumber(); fail( "Expected a NumberFormatException" ); } catch ( NumberFormatException ex ) { vs = null; } }
@Override protected void recover() throws Exception { // register Set<URL> recoverRegistered = new HashSet<>(getRegistered()); if (!recoverRegistered.isEmpty()) { if (logger.isInfoEnabled()) { logger.info("Recover register url " + recoverRegistered); } for (URL url : recoverRegistered) { // remove fail registry or unRegistry task first. removeFailedRegistered(url); removeFailedUnregistered(url); addFailedRegistered(url); } } // subscribe Map<URL, Set<NotifyListener>> recoverSubscribed = new HashMap<>(getSubscribed()); if (!recoverSubscribed.isEmpty()) { if (logger.isInfoEnabled()) { logger.info("Recover subscribe url " + recoverSubscribed.keySet()); } for (Map.Entry<URL, Set<NotifyListener>> entry : recoverSubscribed.entrySet()) { URL url = entry.getKey(); for (NotifyListener listener : entry.getValue()) { // First remove other tasks to ensure that addFailedSubscribed can succeed. removeFailedSubscribed(url, listener); addFailedSubscribed(url, listener); } } } }
@Test void testRecover() throws Exception { CountDownLatch countDownLatch = new CountDownLatch(6); final AtomicReference<Boolean> notified = new AtomicReference<Boolean>(false); NotifyListener listener = urls -> notified.set(Boolean.TRUE); MockRegistry mockRegistry = new MockRegistry(registryUrl, serviceUrl, countDownLatch); mockRegistry.register(serviceUrl); mockRegistry.subscribe(serviceUrl, listener); Assertions.assertEquals(1, mockRegistry.getRegistered().size()); Assertions.assertEquals(1, mockRegistry.getSubscribed().size()); mockRegistry.recover(); countDownLatch.await(); Assertions.assertEquals(0, mockRegistry.getFailedRegistered().size()); FailbackRegistry.Holder h = new FailbackRegistry.Holder(registryUrl, listener); Assertions.assertNull(mockRegistry.getFailedSubscribed().get(h)); Assertions.assertEquals(countDownLatch.getCount(), 0); }
@InvokeOnHeader(CONTROL_ACTION_SUBSCRIBE) public void performSubscribe(final Message message, AsyncCallback callback) { String filterId; if (message.getBody() instanceof DynamicRouterControlMessage) { filterId = subscribeFromMessage(dynamicRouterControlService, message, false); } else { filterId = subscribeFromHeaders(dynamicRouterControlService, message, false); } message.setBody(filterId); callback.done(false); }
@Test void performSubscribeActionWithEmptyExpressionLanguage() { String subscribeChannel = "testChannel"; Map<String, Object> headers = Map.of( CONTROL_ACTION_HEADER, CONTROL_ACTION_SUBSCRIBE, CONTROL_SUBSCRIBE_CHANNEL, subscribeChannel, CONTROL_SUBSCRIPTION_ID, "testId", CONTROL_DESTINATION_URI, "mock://test", CONTROL_PREDICATE, "true", CONTROL_EXPRESSION_LANGUAGE, "", CONTROL_PRIORITY, 10); when(message.getHeaders()).thenReturn(headers); Mockito.doNothing().when(callback).done(false); producer.performSubscribe(message, callback); Mockito.verify(controlService, Mockito.times(1)).subscribeWithPredicateInstance( subscribeChannel, "testId", "mock://test", 10, null, false); }
@Override public void commence(final HttpServletRequest httpServletRequest, final HttpServletResponse httpServletResponse, final AuthenticationException authenticationException) throws IOException { httpServletResponse.setContentType(MediaType.APPLICATION_JSON_VALUE); httpServletResponse.setStatus(HttpStatus.UNAUTHORIZED.value()); final CustomError customError = CustomError.builder() .header(CustomError.Header.AUTH_ERROR.getName()) .httpStatus(HttpStatus.UNAUTHORIZED) .isSuccess(false) .build(); final String responseBody = OBJECT_MAPPER .writer(DateFormat.getDateInstance()) .writeValueAsString(customError); httpServletResponse.getOutputStream() .write(responseBody.getBytes()); }
@Test public void testCommence() throws IOException { // Mock objects HttpServletRequest httpServletRequest = mock(HttpServletRequest.class); HttpServletResponse httpServletResponse = mock(HttpServletResponse.class); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ServletOutputStream servletOutputStream = new ServletOutputStream() { @Override public void write(int b) throws IOException { byteArrayOutputStream.write(b); } @Override public boolean isReady() { return true; } @Override public void setWriteListener(WriteListener writeListener) { // No-op } }; // Set up the mocks when(httpServletResponse.getOutputStream()).thenReturn(servletOutputStream); // Call the method to test customAuthenticationEntryPoint.commence(httpServletRequest, httpServletResponse, new AuthenticationException("Test") {}); // Verify that the response status was set verify(httpServletResponse).setStatus(HttpStatus.UNAUTHORIZED.value()); verify(httpServletResponse).setContentType(MediaType.APPLICATION_JSON_VALUE); // Convert the response to a string and verify the content String responseBody = byteArrayOutputStream.toString(); // Use ByteArrayOutputStream ObjectMapper objectMapper = new ObjectMapper(); objectMapper.registerModule(new JavaTimeModule()); CustomError expectedCustomError = CustomError.builder() .header(CustomError.Header.AUTH_ERROR.getName()) .httpStatus(HttpStatus.UNAUTHORIZED) .isSuccess(false) .build(); String expectedResponseBody = objectMapper.writeValueAsString(expectedCustomError); // Parse the JSON response and expected response JsonNode responseNode = objectMapper.readTree(responseBody); JsonNode expectedNode = objectMapper.readTree(expectedResponseBody); // Extract and format the 'time' fields String responseTime = responseNode.get("time").asText(); JsonNode expectedTimeNode = expectedNode.get("time"); // Define a DateTimeFormatter to compare up to minutes DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; // Parse the time strings into LocalDateTime objects LocalDateTime responseDateTime = LocalDateTime.parse(responseTime, formatter); LocalDateTime expectedDateTime = convertArrayToLocalDateTime(expectedTimeNode); // Truncate to minutes for comparison responseDateTime = responseDateTime.truncatedTo(ChronoUnit.MINUTES); expectedDateTime = expectedDateTime.truncatedTo(ChronoUnit.MINUTES); // Compare only the date and time up to minutes assertEquals(expectedDateTime, responseDateTime); }
@Nullable @Override public Message decode(@Nonnull final RawMessage rawMessage) { final GELFMessage gelfMessage = new GELFMessage(rawMessage.getPayload(), rawMessage.getRemoteAddress()); final String json = gelfMessage.getJSON(decompressSizeLimit, charset); final JsonNode node; try { node = objectMapper.readTree(json); if (node == null) { throw new IOException("null result"); } } catch (final Exception e) { log.error("Could not parse JSON, first 400 characters: " + StringUtils.abbreviate(json, 403), e); throw new IllegalStateException("JSON is null/could not be parsed (invalid JSON)", e); } try { validateGELFMessage(node, rawMessage.getId(), rawMessage.getRemoteAddress()); } catch (IllegalArgumentException e) { log.trace("Invalid GELF message <{}>", node); throw e; } // Timestamp. final double messageTimestamp = timestampValue(node); final DateTime timestamp; if (messageTimestamp <= 0) { timestamp = rawMessage.getTimestamp(); } else { // we treat this as a unix timestamp timestamp = Tools.dateTimeFromDouble(messageTimestamp); } final Message message = messageFactory.createMessage( stringValue(node, "short_message"), stringValue(node, "host"), timestamp ); message.addField(Message.FIELD_FULL_MESSAGE, stringValue(node, "full_message")); final String file = stringValue(node, "file"); if (file != null && !file.isEmpty()) { message.addField("file", file); } final long line = longValue(node, "line"); if (line > -1) { message.addField("line", line); } // Level is set by server if not specified by client. final int level = intValue(node, "level"); if (level > -1) { message.addField("level", level); } // Facility is set by server if not specified by client. final String facility = stringValue(node, "facility"); if (facility != null && !facility.isEmpty()) { message.addField("facility", facility); } // Add additional data if there is some. final Iterator<Map.Entry<String, JsonNode>> fields = node.fields(); while (fields.hasNext()) { final Map.Entry<String, JsonNode> entry = fields.next(); String key = entry.getKey(); // Do not index useless GELF "version" field. if ("version".equals(key)) { continue; } // Don't include GELF syntax underscore in message field key. if (key.startsWith("_") && key.length() > 1) { key = key.substring(1); } // We already set short_message and host as message and source. Do not add as fields again. if ("short_message".equals(key) || "host".equals(key)) { continue; } // Skip standard or already set fields. if (message.getField(key) != null || Message.RESERVED_FIELDS.contains(key) && !Message.RESERVED_SETTABLE_FIELDS.contains(key)) { continue; } // Convert JSON containers to Strings, and pick a suitable number representation. final JsonNode value = entry.getValue(); final Object fieldValue; if (value.isContainerNode()) { fieldValue = value.toString(); } else if (value.isFloatingPointNumber()) { fieldValue = value.asDouble(); } else if (value.isIntegralNumber()) { fieldValue = value.asLong(); } else if (value.isNull()) { log.debug("Field [{}] is NULL. Skipping.", key); continue; } else if (value.isTextual()) { fieldValue = value.asText(); } else { log.debug("Field [{}] has unknown value type. Skipping.", key); continue; } message.addField(key, fieldValue); } return message; }
@Test public void decodeFailsWithWrongTypeForHost() throws Exception { final String json = "{" + "\"version\": \"1.1\"," + "\"host\": 42," + "\"short_message\": \"A short message that helps you identify what is going on\"" + "}"; final RawMessage rawMessage = new RawMessage(json.getBytes(StandardCharsets.UTF_8)); assertThatIllegalArgumentException().isThrownBy(() -> codec.decode(rawMessage)) .withNoCause() .withMessageMatching("GELF message <[0-9a-f-]+> has invalid \"host\": 42"); }
public void add(CSQueue queue) { String fullName = queue.getQueuePath(); String shortName = queue.getQueueShortName(); try { modificationLock.writeLock().lock(); fullNameQueues.put(fullName, queue); getMap.put(fullName, queue); //we only update short queue name ambiguity for non root queues if (!shortName.equals(CapacitySchedulerConfiguration.ROOT)) { //getting or creating the ambiguity set for the current queue Set<String> fullNamesSet = this.shortNameToLongNames.getOrDefault(shortName, new HashSet<>()); //adding the full name to the queue fullNamesSet.add(fullName); this.shortNameToLongNames.put(shortName, fullNamesSet); } //updating the getMap references for the queue updateGetMapForShortName(shortName); } finally { modificationLock.writeLock().unlock(); } }
@Test public void testSimpleMapping() throws IOException { CSQueueStore store = new CSQueueStore(); //root.main CSQueue main = createParentQueue("main", root); //root.main.A CSQueue mainA = createLeafQueue("A", main); //root.main.B CSQueue mainB = createParentQueue("B", main); //root.main.B.C CSQueue mainBC = createLeafQueue("C", mainB); store.add(main); store.add(mainA); store.add(mainB); store.add(mainBC); assertAccessibleByAllNames(store, main); assertAccessibleByAllNames(store, mainA); assertAccessibleByAllNames(store, mainB); assertAccessibleByAllNames(store, mainBC); }
public int printTopology() throws IOException { DistributedFileSystem dfs = getDFS(); final DatanodeInfo[] report = dfs.getDataNodeStats(); // Build a map of rack -> nodes from the datanode report Map<String, HashMap<String, String>> map = new HashMap<>(); for(DatanodeInfo dni : report) { String location = dni.getNetworkLocation(); String name = dni.getName(); String dnState = dni.getAdminState().toString(); if(!map.containsKey(location)) { map.put(location, new HashMap<>()); } Map<String, String> node = map.get(location); node.put(name, dnState); } // Sort the racks (and nodes) alphabetically, display in order List<String> racks = new ArrayList<>(map.keySet()); Collections.sort(racks); for(String r : racks) { System.out.println("Rack: " + r); Map<String, String> nodes = map.get(r); for(Map.Entry<String, String> entry : nodes.entrySet()) { String n = entry.getKey(); System.out.print(" " + n); String hostname = NetUtils.getHostNameOfIP(n); if(hostname != null) { System.out.print(" (" + hostname + ")"); } System.out.print(" " + entry.getValue()); System.out.println(); } System.out.println(); } return 0; }
@Test(timeout = 30000) public void testPrintTopology() throws Exception { redirectStream(); /* init conf */ final Configuration dfsConf = new HdfsConfiguration(); final File baseDir = new File( PathUtils.getTestDir(getClass()), GenericTestUtils.getMethodName()); dfsConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath()); final int numDn = 4; final String[] racks = { "/d1/r1", "/d1/r2", "/d2/r1", "/d2/r2"}; /* init cluster using topology */ try (MiniDFSCluster miniCluster = new MiniDFSCluster.Builder(dfsConf) .numDataNodes(numDn).racks(racks).build()) { miniCluster.waitActive(); assertEquals(numDn, miniCluster.getDataNodes().size()); final DFSAdmin dfsAdmin = new DFSAdmin(dfsConf); resetStream(); final int ret = ToolRunner.run(dfsAdmin, new String[] {"-printTopology"}); /* collect outputs */ final List<String> outs = Lists.newArrayList(); scanIntoList(out, outs); /* verify results */ assertEquals(0, ret); assertEquals( "There should be three lines per Datanode: the 1st line is" + " rack info, 2nd node info, 3rd empty line. The total" + " should be as a result of 3 * numDn.", 12, outs.size()); assertThat(outs.get(0), is(allOf(containsString("Rack:"), containsString("/d1/r1")))); assertThat(outs.get(3), is(allOf(containsString("Rack:"), containsString("/d1/r2")))); assertThat(outs.get(6), is(allOf(containsString("Rack:"), containsString("/d2/r1")))); assertThat(outs.get(9), is(allOf(containsString("Rack:"), containsString("/d2/r2")))); } }
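The shape the test counts on (rack header, node line, blank line; with one node per rack that works out to three lines per datanode) comes from printTopology's group-and-sort core. A minimal JDK-only sketch of that core, with illustrative class and method names:

class TopologySketch {
    // rackToNodes: rack path (e.g. "/d1/r1") -> node name -> admin state
    static void print(java.util.Map<String, java.util.Map<String, String>> rackToNodes) {
        // TreeMap iterates racks in sorted order, mirroring the Collections.sort above
        new java.util.TreeMap<>(rackToNodes).forEach((rack, nodes) -> {
            System.out.println("Rack: " + rack);
            nodes.forEach((name, state) -> System.out.println("  " + name + " " + state));
            System.out.println(); // one blank separator line per rack
        });
    }
}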
@Override public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException { this.trash(files, callback, false); }
@Test public void testDeleteRevertDeletePurgeFile() throws Exception { final DeepboxIdProvider nodeid = new DeepboxIdProvider(session); final Path parentFolder = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Documents/Auditing", EnumSet.of(Path.Type.directory)); final Path trash = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Trash", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(parentFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final Path testInTrash = new Path(trash, test.getName(), EnumSet.of(Path.Type.file)); new DeepboxTouchFeature(session, nodeid).touch(test, new TransferStatus()); assertTrue(new DeepboxFindFeature(session, nodeid).find(test.withAttributes(new PathAttributes()))); assertFalse(new DeepboxFindFeature(session, nodeid).find(testInTrash.withAttributes(new PathAttributes()))); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANDELETE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANPURGE)); assertFalse(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANREVERT)); new DeepboxTrashFeature(session, nodeid).delete(Collections.singletonList(test.withAttributes(new PathAttributes())), new DisabledPasswordCallback(), new Delete.DisabledCallback()); assertFalse(new DeepboxFindFeature(session, nodeid).find(test.withAttributes(new PathAttributes()))); assertTrue(new DeepboxFindFeature(session, nodeid).find(testInTrash.withAttributes(new PathAttributes()))); assertFalse(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANDELETE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANPURGE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANREVERT)); new DeepboxRestoreFeature(session, nodeid).restore(testInTrash.withAttributes(new PathAttributes()), new DisabledLoginCallback()); assertTrue(new DeepboxFindFeature(session, nodeid).find(test.withAttributes(new PathAttributes()))); assertFalse(new DeepboxFindFeature(session, nodeid).find(testInTrash.withAttributes(new PathAttributes()))); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANDELETE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANPURGE)); assertFalse(new DeepboxAttributesFinderFeature(session, nodeid).find(test).getAcl().get(new Acl.CanonicalUser()).contains(CANREVERT)); new DeepboxTrashFeature(session, nodeid).delete(Collections.singletonList(test.withAttributes(new PathAttributes())), new DisabledPasswordCallback(), new Delete.DisabledCallback()); assertFalse(new DeepboxFindFeature(session, nodeid).find(test.withAttributes(new PathAttributes()))); assertTrue(new DeepboxFindFeature(session, nodeid).find(testInTrash.withAttributes(new PathAttributes()))); assertFalse(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANDELETE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANPURGE)); assertTrue(new DeepboxAttributesFinderFeature(session, nodeid).find(testInTrash).getAcl().get(new Acl.CanonicalUser()).contains(CANREVERT)); new DeepboxTrashFeature(session, nodeid).delete(Collections.singletonList(testInTrash.withAttributes(new PathAttributes())), new DisabledPasswordCallback(), new Delete.DisabledCallback()); assertFalse(new DeepboxFindFeature(session, nodeid).find(test.withAttributes(new PathAttributes()))); assertFalse(new DeepboxFindFeature(session, nodeid).find(testInTrash.withAttributes(new PathAttributes()))); }
public boolean streq(String str) { return streq(data, str); }
@Test public void testStreq() { ZData data = new ZData("test".getBytes(ZMQ.CHARSET)); assertThat(data.streq("test"), is(true)); }
@Override public URI resolveHostUri() { InetSocketAddress host = resolveHost(); String hostUrl = serviceUri.getServiceScheme() + "://" + host.getHostString() + ":" + host.getPort(); return URI.create(hostUrl); }
@Test(expectedExceptions = IllegalStateException.class) public void testResolveUrlBeforeUpdateServiceUrl() { resolver.resolveHostUri(); }
@Override public void run() { // top-level command, do nothing }
@Test public void test_cancelJob_byJobId() { // Given Job job = newJob(); // When run("cancel", job.getIdString()); // Then assertThat(job).eventuallyHasStatus(JobStatus.FAILED); }
@Override public DescriptiveUrlBag toUrl(final Path file) { final DescriptiveUrlBag list = new DescriptiveUrlBag(); if(new HostPreferences(session.getHost()).getBoolean("s3.bucket.virtualhost.disable")) { list.addAll(new DefaultUrlProvider(session.getHost()).toUrl(file)); } else { list.add(this.toUrl(file, session.getHost().getProtocol().getScheme(), session.getHost().getPort())); list.add(this.toUrl(file, Scheme.http, 80)); if(StringUtils.isNotBlank(session.getHost().getWebURL())) { // Only include when custom domain is configured list.addAll(new HostWebUrlProvider(session.getHost()).toUrl(file)); } } if(file.isFile()) { if(!session.getHost().getCredentials().isAnonymousLogin()) { // X-Amz-Expires must be less than a week (in seconds); that is, the given X-Amz-Expires must be less // than 604800 seconds // In one hour list.add(this.toSignedUrl(file, (int) TimeUnit.HOURS.toSeconds(1))); // Default signed URL expiring in 24 hours. list.add(this.toSignedUrl(file, (int) TimeUnit.SECONDS.toSeconds( new HostPreferences(session.getHost()).getInteger("s3.url.expire.seconds")))); // 1 Week list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(7))); switch(session.getSignatureVersion()) { case AWS2: // 1 Month list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(30))); // 1 Year list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(365))); break; case AWS4HMACSHA256: break; } } } // AWS services require specifying an Amazon S3 bucket using S3://bucket list.add(new DescriptiveUrl(URI.create(String.format("s3://%s%s", containerService.getContainer(file).getName(), file.isRoot() ? Path.DELIMITER : containerService.isContainer(file) ? Path.DELIMITER : String.format("/%s", URIEncoder.encode(containerService.getKey(file))))), DescriptiveUrl.Type.provider, MessageFormat.format(LocaleFactory.localizedString("{0} URL"), "S3"))); // Filter by matching container name final Optional<Set<Distribution>> filtered = distributions.entrySet().stream().filter(entry -> new SimplePathPredicate(containerService.getContainer(file)).test(entry.getKey())) .map(Map.Entry::getValue).findFirst(); if(filtered.isPresent()) { // Add CloudFront distributions for(Distribution distribution : filtered.get()) { list.addAll(new DistributionUrlProvider(distribution).toUrl(file)); } } return list; }
@Test public void testToSignedUrlAnonymous() throws Exception { final S3Session session = new S3Session(new Host(new S3Protocol(), new S3Protocol().getDefaultHostname(), new Credentials("anonymous", null))) { @Override public RequestEntityRestStorageService getClient() { try { return this.connect(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); } catch(BackgroundException e) { fail(); throw new RuntimeException(e); } } }; assertEquals(DescriptiveUrl.EMPTY, new S3UrlProvider(session, Collections.emptyMap(), new DisabledPasswordStore() { @Override public String findLoginPassword(final Host bookmark) { return "k"; } }).toUrl(new Path("/test-eu-west-1-cyberduck/test f", EnumSet.of(Path.Type.file))).find(DescriptiveUrl.Type.signed) ); }
@Override public Map<RedisClusterNode, Collection<RedisClusterNode>> clusterGetMasterSlaveMap() { Iterable<RedisClusterNode> res = clusterGetNodes(); Set<RedisClusterNode> masters = new HashSet<RedisClusterNode>(); for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) { RedisClusterNode redisClusterNode = iterator.next(); if (redisClusterNode.isMaster()) { masters.add(redisClusterNode); } } Map<RedisClusterNode, Collection<RedisClusterNode>> result = new HashMap<RedisClusterNode, Collection<RedisClusterNode>>(); for (Iterator<RedisClusterNode> iterator = res.iterator(); iterator.hasNext();) { RedisClusterNode redisClusterNode = iterator.next(); for (RedisClusterNode masterNode : masters) { if (redisClusterNode.getMasterId() != null && redisClusterNode.getMasterId().equals(masterNode.getId())) { Collection<RedisClusterNode> list = result.get(masterNode); if (list == null) { list = new ArrayList<RedisClusterNode>(); result.put(masterNode, list); } list.add(redisClusterNode); } } } return result; }
@Test public void testClusterGetMasterSlaveMap() { Map<RedisClusterNode, Collection<RedisClusterNode>> map = connection.clusterGetMasterSlaveMap(); assertThat(map).hasSize(3); for (Collection<RedisClusterNode> slaves : map.values()) { assertThat(slaves).hasSize(1); } }
@Override public Collection<String> getJdbcUrlPrefixes() { return Collections.singleton(String.format("jdbc:%s:", getType().toLowerCase())); }
@Test void assertGetJdbcUrlPrefixes() { assertThat(TypedSPILoader.getService(DatabaseType.class, "H2").getJdbcUrlPrefixes(), is(Collections.singleton("jdbc:h2:"))); }
@Override public void createFunction(SqlInvokedFunction function, boolean replace) { checkCatalog(function); checkFunctionLanguageSupported(function); checkArgument(!function.hasVersion(), "function '%s' is already versioned", function); QualifiedObjectName functionName = function.getFunctionId().getFunctionName(); checkFieldLength("Catalog name", functionName.getCatalogName(), MAX_CATALOG_NAME_LENGTH); checkFieldLength("Schema name", functionName.getSchemaName(), MAX_SCHEMA_NAME_LENGTH); if (!functionNamespaceDao.functionNamespaceExists(functionName.getCatalogName(), functionName.getSchemaName())) { throw new PrestoException(NOT_FOUND, format("Function namespace not found: %s", functionName.getCatalogSchemaName())); } checkFieldLength("Function name", functionName.getObjectName(), MAX_FUNCTION_NAME_LENGTH); if (function.getParameters().size() > MAX_PARAMETER_COUNT) { throw new PrestoException(GENERIC_USER_ERROR, format("Function has more than %s parameters: %s", MAX_PARAMETER_COUNT, function.getParameters().size())); } for (Parameter parameter : function.getParameters()) { checkFieldLength("Parameter name", parameter.getName(), MAX_PARAMETER_NAME_LENGTH); } checkFieldLength( "Parameter type list", function.getFunctionId().getArgumentTypes().stream() .map(TypeSignature::toString) .collect(joining(",")), MAX_PARAMETER_TYPES_LENGTH); checkFieldLength("Return type", function.getSignature().getReturnType().toString(), MAX_RETURN_TYPE_LENGTH); jdbi.useTransaction(handle -> { FunctionNamespaceDao transactionDao = handle.attach(functionNamespaceDaoClass); Optional<SqlInvokedFunctionRecord> latestVersion = transactionDao.getLatestRecordForUpdate(hash(function.getFunctionId()), function.getFunctionId()); if (!replace && latestVersion.isPresent() && !latestVersion.get().isDeleted()) { throw new PrestoException(ALREADY_EXISTS, "Function already exists: " + function.getFunctionId()); } if (!latestVersion.isPresent() || !latestVersion.get().getFunction().hasSameDefinitionAs(function)) { long newVersion = latestVersion.map(SqlInvokedFunctionRecord::getFunction).map(MySqlFunctionNamespaceManager::getLongVersion).orElse(0L) + 1; insertSqlInvokedFunction(transactionDao, function, newVersion); } else if (latestVersion.get().isDeleted()) { SqlInvokedFunction latest = latestVersion.get().getFunction(); checkState(latest.hasVersion(), "Function version missing: %s", latest.getFunctionId()); transactionDao.setDeletionStatus(hash(latest.getFunctionId()), latest.getFunctionId(), getLongVersion(latest), false); } }); refreshFunctionsCache(functionName); }
@Test public void testCreateFunction() { assertListFunctions(); createFunction(FUNCTION_POWER_TOWER_DOUBLE, false); assertListFunctions(FUNCTION_POWER_TOWER_DOUBLE.withVersion("1")); createFunction(FUNCTION_POWER_TOWER_DOUBLE_UPDATED, true); assertListFunctions(FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2")); assertGetFunctions(POWER_TOWER, FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2")); createFunction(FUNCTION_POWER_TOWER_INT, true); assertListFunctions(FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2"), FUNCTION_POWER_TOWER_INT.withVersion("1")); assertGetFunctions(POWER_TOWER, FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2"), FUNCTION_POWER_TOWER_INT.withVersion("1")); createFunction(FUNCTION_TANGENT, true); assertListFunctions(FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2"), FUNCTION_POWER_TOWER_INT.withVersion("1"), FUNCTION_TANGENT.withVersion("1")); assertGetFunctions(POWER_TOWER, FUNCTION_POWER_TOWER_DOUBLE_UPDATED.withVersion("2"), FUNCTION_POWER_TOWER_INT.withVersion("1")); assertGetFunctions(TANGENT, FUNCTION_TANGENT.withVersion("1")); }
@Override public void clear() { partitionMaps.clear(); }
@Test public void testClear() { PartitionMap<String> map = PartitionMap.create(SPECS); map.put(UNPARTITIONED_SPEC.specId(), null, "v1"); map.put(BY_DATA_SPEC.specId(), Row.of("aaa"), "v2"); assertThat(map).hasSize(2); map.clear(); assertThat(map).isEmpty(); }
Location append(String relativeURI) { relativeURI = encodeIllegalCharacters(relativeURI); if (uri.toString().endsWith("/") && relativeURI.startsWith("/")) { relativeURI = relativeURI.substring(1); } if (!uri.toString().endsWith("/") && !relativeURI.startsWith("/")) { relativeURI = "/" + relativeURI; } return Location.of(URI.create(uri + relativeURI)); }
@Test @UseDataProvider("base_locations") public void append(Location location) { Location appendAbsolute = location.append("/bar/baz"); Location appendRelative = location.append("bar/baz"); Location expected = Location.of(Paths.get("/some/path/bar/baz")); assertThat(appendAbsolute).isEqualTo(expected); assertThat(appendRelative).isEqualTo(expected); }
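Behind append's slash handling there is a single invariant: exactly one '/' joins base and suffix. A JDK-only sketch of that join (hypothetical helper name; the encodeIllegalCharacters step is omitted):

static String join(String base, String relative) {
    boolean baseSlash = base.endsWith("/");
    boolean relSlash = relative.startsWith("/");
    if (baseSlash && relSlash) {
        return base + relative.substring(1); // collapse the doubled slash
    }
    if (!baseSlash && !relSlash) {
        return base + "/" + relative;        // supply the missing slash
    }
    return base + relative;                  // exactly one slash already present
}
// join("/some/path", "/bar/baz") and join("/some/path", "bar/baz") both give "/some/path/bar/baz",
// matching the two assertions in the test.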
public Optional<Column> findColumn(final ColumnName columnName) { return findColumnMatching(withName(columnName)); }
@Test public void shouldGetValueColumns() { assertThat(SOME_SCHEMA.findColumn(F0), is(Optional.of( Column.of(F0, STRING, Namespace.VALUE, 0) ))); }
public static String getDesc(Class<?> c) { StringBuilder ret = new StringBuilder(); while (c.isArray()) { ret.append('['); c = c.getComponentType(); } if (c.isPrimitive()) { String t = c.getName(); if ("void".equals(t)) { ret.append(JVM_VOID); } else if ("boolean".equals(t)) { ret.append(JVM_BOOLEAN); } else if ("byte".equals(t)) { ret.append(JVM_BYTE); } else if ("char".equals(t)) { ret.append(JVM_CHAR); } else if ("double".equals(t)) { ret.append(JVM_DOUBLE); } else if ("float".equals(t)) { ret.append(JVM_FLOAT); } else if ("int".equals(t)) { ret.append(JVM_INT); } else if ("long".equals(t)) { ret.append(JVM_LONG); } else if ("short".equals(t)) { ret.append(JVM_SHORT); } } else { ret.append('L'); ret.append(c.getName().replace('.', '/')); ret.append(';'); } return ret.toString(); }
@Test void testGetDescConstructor() { assertThat(ReflectUtils.getDesc(Foo2.class.getConstructors()[0]), equalTo("(Ljava/util/List;[I)V")); }
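The descriptor grammar getDesc emits is the JVM's own, which makes the expected string easy to sanity-check with the JDK alone (a cross-check, not the Dubbo implementation):

// Class.getName() already returns descriptor form for array types:
System.out.println(int[].class.getName());    // "[I" - JVM_INT behind a '[' prefix
System.out.println(String[].class.getName()); // "[Ljava.lang.String;" - note '.' rather than '/'
// getDesc swaps '.' for '/'; the constructor overload exercised by the test (not shown here)
// concatenates the per-parameter descriptors, yielding "(Ljava/util/List;[I)V" for Foo2's
// (List, int[]) constructor.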
@Override public void read(ChannelHandlerContext ctx) throws Exception { if (dequeue(ctx, 1) == 0) { // It seems no messages were consumed. We need to read() some // messages from upstream and, once one arrives, it needs to be // relayed to downstream to keep the flow going. shouldConsume = true; ctx.read(); } else if (config.isAutoRead()) { ctx.read(); } }
@Test public void testAutoReadingOff() throws Exception { final Exchanger<Channel> peerRef = new Exchanger<Channel>(); final CountDownLatch latch = new CountDownLatch(3); ChannelInboundHandlerAdapter handler = new ChannelInboundHandlerAdapter() { @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { peerRef.exchange(ctx.channel(), 1L, SECONDS); ctx.fireChannelActive(); } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) { ReferenceCountUtil.release(msg); latch.countDown(); } }; Channel server = newServer(false, handler); Channel client = newClient(server.localAddress()); try { // The client connection on the server side Channel peer = peerRef.exchange(null, 1L, SECONDS); // Write the message client.writeAndFlush(newOneMessage()) .syncUninterruptibly(); // Read the message peer.read(); // We received all three messages but hoped that only one // message was read because auto reading was off and we // invoked the read() method only once. assertTrue(latch.await(1L, SECONDS)); } finally { client.close(); server.close(); } }
public static String resolvePassword( VariableSpace variables, String password ) { String resolvedPassword = variables.environmentSubstitute( password ); if ( resolvedPassword != null ) { // returns resolved decrypted password return Encr.decryptPasswordOptionallyEncrypted( resolvedPassword ); } else { // actually null return resolvedPassword; } }
@Test public void testResolvePasswordNull() { String password = null; // null is valid input parameter assertSame( password, Utils.resolvePassword( Variables.getADefaultVariableSpace(), password ) ); }
public synchronized void synchronizePartitionSchemas( PartitionSchema partitionSchema ) { synchronizePartitionSchemas( partitionSchema, partitionSchema.getName() ); }
@Test public void synchronizePartitionSchemas_should_not_sync_unshared() throws Exception { final String partitionSchemaName = "PartitionSchema"; TransMeta transformation1 = createTransMeta(); PartitionSchema partitionSchema1 = createPartitionSchema( partitionSchemaName, true ); transformation1.setPartitionSchemas( Collections.singletonList( partitionSchema1 ) ); spoonDelegates.trans.addTransformation( transformation1 ); TransMeta transformation2 = createTransMeta(); PartitionSchema partitionSchema2 = createPartitionSchema( partitionSchemaName, false ); transformation2.setPartitionSchemas( Collections.singletonList( partitionSchema2 ) ); spoonDelegates.trans.addTransformation( transformation2 ); partitionSchema2.setNumberOfPartitionsPerSlave( AFTER_SYNC_VALUE ); sharedUtil.synchronizePartitionSchemas( partitionSchema2 ); assertThat( partitionSchema1.getNumberOfPartitionsPerSlave(), equalTo( BEFORE_SYNC_VALUE ) ); }
@Override public void upgrade() { if (clusterConfigService.get(MigrationCompleted.class) != null) { LOG.debug("Migration already completed!"); return; } final List<SearchPivotLimitMigration> pivotLimitMigrations = StreamSupport.stream(this.searches.find().spliterator(), false) .flatMap(document -> { final String searchId = document.get("_id", ObjectId.class).toHexString(); final List<Document> queries = document.get("queries", Collections.emptyList()); return EntryStream.of(queries) .flatMap(entry -> { final Integer queryIndex = entry.getKey(); final List<Document> searchTypes = entry.getValue().get("search_types", Collections.emptyList()); return EntryStream.of(searchTypes) .filter(searchType -> "pivot".equals(searchType.getValue().getString("type"))) .flatMap(searchTypeEntry -> { final Document searchType = searchTypeEntry.getValue(); final Integer searchTypeIndex = searchTypeEntry.getKey(); final boolean hasRowLimit = searchType.containsKey("row_limit"); final boolean hasColumnLimit = searchType.containsKey("column_limit"); final Optional<Integer> rowLimit = Optional.ofNullable(searchType.getInteger("row_limit")); final Optional<Integer> columnLimit = Optional.ofNullable(searchType.getInteger("column_limit")); if (searchTypeIndex != null && (hasRowLimit || hasColumnLimit)) { return Stream.of(new SearchPivotLimitMigration(searchId, queryIndex, searchTypeIndex, rowLimit, columnLimit)); } return Stream.empty(); }); }); }) .collect(Collectors.toList()); final List<WriteModel<Document>> operations = pivotLimitMigrations.stream() .flatMap(pivotMigration -> { final ImmutableList.Builder<WriteModel<Document>> builder = ImmutableList.builder(); builder.add( updateSearch( pivotMigration.searchId(), doc("$unset", doc(pivotPath(pivotMigration) + ".row_limit", 1)) ) ); builder.add( updateSearch( pivotMigration.searchId(), doc("$set", doc(pivotPath(pivotMigration) + ".row_groups.$[pivot].limit", pivotMigration.rowLimit.orElse(DEFAULT_LIMIT))), matchValuePivots ) ); builder.add( updateSearch( pivotMigration.searchId(), doc("$unset", doc(pivotPath(pivotMigration) + ".column_limit", 1)) ) ); builder.add( updateSearch( pivotMigration.searchId(), doc("$set", doc(pivotPath(pivotMigration) + ".column_groups.$[pivot].limit", pivotMigration.columnLimit.orElse(DEFAULT_LIMIT))), matchValuePivots ) ); return builder.build().stream(); }) .collect(Collectors.toList()); if (!operations.isEmpty()) { LOG.debug("Updating {} search types ...", pivotLimitMigrations.size()); this.searches.bulkWrite(operations); } clusterConfigService.write(new MigrationCompleted(pivotLimitMigrations.size())); }
@Test @MongoDBFixtures("V20230113095301_MigrateGlobalPivotLimitsToGroupingsInSearchesTest_null_limits.json") void migratingPivotsWithNullLimits() { this.migration.upgrade(); assertThat(migrationCompleted().migratedSearchTypes()).isEqualTo(2); final Document document = this.collection.find().first(); final List<Document> searchTypes = getSearchTypes(document); for (Document searchType : searchTypes) { assertThatFieldsAreUnset(searchType); } }
public int getCodeLength() { return codeLength; }
@Test void testCodeLength() { byte[] startBytes1 = { 0x00 }; byte[] endBytes1 = { 0x20 }; CodespaceRange range1 = new CodespaceRange(startBytes1, endBytes1); assertEquals(1, range1.getCodeLength()); byte[] startBytes2 = { 0x00, 0x00 }; byte[] endBytes2 = { 0x01, 0x20 }; CodespaceRange range2 = new CodespaceRange(startBytes2, endBytes2); assertEquals(2, range2.getCodeLength()); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldChooseNonVarargWithNullValues() { // Given: givenFunctions( function(EXPECTED, -1, STRING), function(OTHER, 0, STRING_VARARGS) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(Collections.singletonList(null)); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
public RingbufferConfig setCapacity(int capacity) { this.capacity = checkPositive("capacity", capacity); return this; }
@Test(expected = IllegalArgumentException.class) public void setCapacity_whenTooSmall() { RingbufferConfig config = new RingbufferConfig(NAME); config.setCapacity(0); }
public static Path compose(final Path root, final String path) { if(StringUtils.startsWith(path, String.valueOf(Path.DELIMITER))) { // Mount absolute path final String normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), true); if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(normalized, normalized.equals(String.valueOf(Path.DELIMITER)) ? EnumSet.of(Path.Type.volume, Path.Type.directory) : EnumSet.of(Path.Type.directory)); } else { final String normalized; if(StringUtils.startsWith(path, String.format("%s%s", Path.HOME, Path.DELIMITER))) { // Relative path to the home directory normalized = normalize(StringUtils.removeStart(StringUtils.removeStart( StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), Path.HOME), String.valueOf(Path.DELIMITER)), false); } else { // Relative path normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), false); } if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(String.format("%s%s%s", root.getAbsolute(), root.isRoot() ? StringUtils.EMPTY : Path.DELIMITER, normalized), EnumSet.of(Path.Type.directory)); } }
@Test public void testRelativeParent() { final Path home = PathNormalizer.compose(new Path("/", EnumSet.of(Path.Type.directory)), "sandbox/sub"); assertEquals(new Path("/sandbox/sub", EnumSet.of(Path.Type.directory)), home); assertEquals(new Path("/sandbox", EnumSet.of(Path.Type.directory)), home.getParent()); }
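For orientation, the three branches of compose behave roughly as follows with root = new Path("/", ...) (a hedged summary; Path.HOME is assumed to be the literal "~"):

// compose(root, "/a/b")  -> "/a/b"   (absolute: mounted as-is)
// compose(root, "~/a/b") -> "/a/b"   (home-relative: the "~/" prefix is stripped, then resolved against root)
// compose(root, "a\\b")  -> "/a/b"   (relative: backslashes normalized, then appended to root)
// The test exercises the plain relative branch: "sandbox/sub" -> "/sandbox/sub", whose parent is "/sandbox".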
@Override public String encrypt(final Object plainValue, final AlgorithmSQLContext algorithmSQLContext) { Object result = cryptographicAlgorithm.encrypt(plainValue); return null == result ? null : String.valueOf(result); }
@Test void assertEncrypt() { Object actual = encryptAlgorithm.encrypt("test", mock(AlgorithmSQLContext.class)); assertThat(actual, is("dSpPiyENQGDUXMKFMJPGWA==")); }
@Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception { AsyncByteArrayFeeder streamFeeder = streamReader.getInputFeeder(); logger.info("Decoding XMPP data.. "); byte[] buffer = new byte[in.readableBytes()]; in.readBytes(buffer); logger.debug("Buffer length: " + buffer.length); try { streamFeeder.feedInput(buffer, 0, buffer.length); } catch (XMLStreamException exception) { logger.info(exception.getMessage()); in.skipBytes(in.readableBytes()); logger.info("Bytes skipped"); throw exception; } while (streamReader.hasNext() && streamReader.next() != AsyncXMLStreamReader.EVENT_INCOMPLETE) { out.add(allocator.allocate(streamReader)); } }
@Test public void testDecodeStreamOpen() throws Exception { XmlStreamDecoder decoder = new XmlStreamDecoder(); ByteBuf buffer = Unpooled.buffer(); buffer.writeBytes(streamOpenMsg.getBytes(Charsets.UTF_8)); List<Object> list = Lists.newArrayList(); decoder.decode(new ChannelHandlerContextAdapter(), buffer, list); list.forEach(object -> { assertThat(object, is(instanceOf(XMLEvent.class))); }); assertThat(list.size(), is(2)); assertThat(((XMLEvent) list.get(0)).isStartDocument(), is(true)); ((XMLEvent) list.get(0)).isStartElement(); }
public static Map<String, String> decodeFormData(String formString) { return FORM_SPLITTER.split(formString).entrySet().stream() .collect( ImmutableMap.toImmutableMap( e -> RESTUtil.decodeString(e.getKey()), e -> RESTUtil.decodeString(e.getValue()))); }
@Test @SuppressWarnings("checkstyle:AvoidEscapedUnicodeCharacters") public void testOAuth2FormDataDecoding() { String utf8 = "\u0020\u0025\u0026\u002B\u00A3\u20AC"; String asString = "+%25%26%2B%C2%A3%E2%82%AC"; Map<String, String> expected = ImmutableMap.of("client_id", "12345", "client_secret", utf8); String formString = "client_id=12345&client_secret=" + asString; assertThat(RESTUtil.decodeFormData(formString)).isEqualTo(expected); }
public synchronized List<Page> getPages( Long tableId, int partNumber, int totalParts, List<Integer> columnIndexes, long expectedRows) { if (!contains(tableId)) { throw new PrestoException(MISSING_DATA, "Failed to find table on a worker."); } TableData tableData = tables.get(tableId); if (tableData.getRows() < expectedRows) { throw new PrestoException(MISSING_DATA, format("Expected to find [%s] rows on a worker, but found [%s].", expectedRows, tableData.getRows())); } ImmutableList.Builder<Page> partitionedPages = ImmutableList.builder(); for (int i = partNumber; i < tableData.getPages().size(); i += totalParts) { partitionedPages.add(getColumns(tableData.getPages().get(i), columnIndexes)); } return partitionedPages.build(); }
@Test(expectedExceptions = PrestoException.class) public void testTryToReadFromEmptyTable() { createTable(0L, 0L); assertEquals(pagesStore.getPages(0L, 0, 1, ImmutableList.of(0), 0), ImmutableList.of()); pagesStore.getPages(0L, 0, 1, ImmutableList.of(0), 42); }
public static URI createURIWithQuery(URI uri, String query) throws URISyntaxException { String schemeSpecificPart = uri.getRawSchemeSpecificPart(); // strip existing query if any int questionMark = schemeSpecificPart.lastIndexOf("?"); // make sure question mark is not within parentheses if (questionMark < schemeSpecificPart.lastIndexOf(")")) { questionMark = -1; } if (questionMark > 0) { schemeSpecificPart = schemeSpecificPart.substring(0, questionMark); } if (query != null && query.length() > 0) { schemeSpecificPart += "?" + query; } return new URI(uri.getScheme(), schemeSpecificPart, uri.getFragment()); }
@Test public void testCompositeCreateURIWithQuery() throws Exception { String queryString = "query=value"; URI originalURI = new URI("outerscheme:(innerscheme:innerssp)"); URI querylessURI = originalURI; assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, null)); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, "")); assertEquals(new URI(querylessURI + "?" + queryString), URISupport.createURIWithQuery(originalURI, queryString)); originalURI = new URI("outerscheme:(innerscheme:innerssp)?outerquery=0"); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, null)); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, "")); assertEquals(new URI(querylessURI + "?" + queryString), URISupport.createURIWithQuery(originalURI, queryString)); originalURI = new URI("outerscheme:(innerscheme:innerssp?innerquery=0)"); querylessURI = originalURI; assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, null)); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, "")); assertEquals(new URI(querylessURI + "?" + queryString), URISupport.createURIWithQuery(originalURI, queryString)); originalURI = new URI("outerscheme:(innerscheme:innerssp?innerquery=0)?outerquery=0"); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, null)); assertEquals(querylessURI, URISupport.createURIWithQuery(originalURI, "")); assertEquals(new URI(querylessURI + "?" + queryString), URISupport.createURIWithQuery(originalURI, queryString)); }
static void convertInputData(final List<KiePMMLMiningField> notTargetMiningFields, final PMMLRequestData requestData) { logger.debug("convertInputData {} {}", notTargetMiningFields, requestData); Collection<ParameterInfo> requestParams = requestData.getRequestParams(); notTargetMiningFields.forEach(miningField -> { ParameterInfo parameterInfo = requestParams.stream() .filter(paramInfo -> miningField.getName().equals(paramInfo.getName())) .findFirst() .orElse(null); if (parameterInfo != null) { Object originalValue = parameterInfo.getValue(); Object requiredValue = miningField.getDataType().getActualValue(originalValue); parameterInfo.setType(miningField.getDataType().getMappedClass()); parameterInfo.setValue(requiredValue); } }); }
@Test void convertInputDataNotConvertibles() { assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> { List<KiePMMLMiningField> miningFields = IntStream.range(0, 3).mapToObj(i -> { DATA_TYPE dataType = DATA_TYPE.values()[i]; new MiningField("FIELD-" + i, null, null, dataType, null, null, null, null, null, null); return KiePMMLMiningField.builder("FIELD-" + i, null) .withDataType(dataType) .build(); }) .collect(Collectors.toList()); PMMLRequestData pmmlRequestData = new PMMLRequestData("123", "modelName"); pmmlRequestData.addRequestParam("FIELD-0", 123); pmmlRequestData.addRequestParam("FIELD-1", true); pmmlRequestData.addRequestParam("FIELD-2", "123"); PreProcess.convertInputData(miningFields, pmmlRequestData); }); }
private void updateInputPartitions(final Map<TaskId, CompletableFuture<StateUpdater.RemovedTaskResult>> futures, final Map<TaskId, Set<TopicPartition>> newInputPartitions, final Map<TaskId, RuntimeException> failedTasks) { getNonFailedTasks(futures, failedTasks).forEach(task -> { task.updateInputPartitions( newInputPartitions.get(task.id()), topologyMetadata.nodeToSourceTopics(task.id()) ); stateUpdater.add(task); }); }
@Test public void shouldNotUpdateExistingStandbyTaskIfStandbyIsReassignedWithSameInputPartitionWithoutStateUpdater() { final StandbyTask standbyTask = standbyTask(taskId03, taskId03ChangelogPartitions) .inState(State.RUNNING) .withInputPartitions(taskId03Partitions).build(); updateExistingStandbyTaskIfStandbyIsReassignedWithoutStateUpdater(standbyTask, taskId03Partitions); verify(standbyTask, never()).updateInputPartitions(eq(taskId03Partitions), any()); }
public IntervalSet negate() { if (!isValid()) { return IntervalSet.ALWAYS; } if (mStartMs == MIN_MS) { if (mEndMs == MAX_MS) { // this is ALWAYS, so the negation is never return IntervalSet.NEVER; } return new IntervalSet(after(mEndMs)); } // start is after min if (mEndMs == MAX_MS) { return new IntervalSet(before(mStartMs)); } // start is after min, and end is before max. This requires 2 intervals. return new IntervalSet(Lists.newArrayList(before(mStartMs), after(mEndMs))); }
@Test public void negateAlways() { List<Interval> neg = Interval.ALWAYS.negate().getIntervals(); Assert.assertTrue(neg.size() == 1); Interval in = neg.get(0); Assert.assertEquals(Interval.NEVER, in); }
@Override public byte[] serialize(final String topic, final T data) { try { return delegate.serialize(topic, data); } catch (final RuntimeException e) { processingLogger.error(new SerializationError<>(e, Optional.of(data), topic, isKey)); throw e; } }
@Test public void shouldSerializeWithDelegate() { // Given: when(delegate.serialize(any(), any())).thenReturn(SOME_BYTES); // When: serializer.serialize("some topic", SOME_ROW); // Then: verify(delegate).serialize("some topic", SOME_ROW); }
public boolean doChannelCloseEvent(final String remoteAddr, final Channel channel) { boolean removed = false; Iterator<Entry<String, ConsumerGroupInfo>> it = this.consumerTable.entrySet().iterator(); while (it.hasNext()) { Entry<String, ConsumerGroupInfo> next = it.next(); ConsumerGroupInfo info = next.getValue(); ClientChannelInfo clientChannelInfo = info.doChannelCloseEvent(remoteAddr, channel); if (clientChannelInfo != null) { removed = true; callConsumerIdsChangeListener(ConsumerGroupEvent.CLIENT_UNREGISTER, next.getKey(), clientChannelInfo, info.getSubscribeTopics()); if (info.getChannelInfoTable().isEmpty()) { ConsumerGroupInfo remove = this.consumerTable.remove(next.getKey()); if (remove != null) { LOGGER.info("unregister consumer ok, no any connection, and remove consumer group, {}", next.getKey()); callConsumerIdsChangeListener(ConsumerGroupEvent.UNREGISTER, next.getKey()); } } callConsumerIdsChangeListener(ConsumerGroupEvent.CHANGE, next.getKey(), info.getAllChannel()); } } return removed; }
@Test public void doChannelCloseEventTest() { consumerManager.doChannelCloseEvent("127.0.0.1", channel); assert consumerManager.getConsumerTable().size() == 0; }
public MaterializedConfiguration getConfiguration() { MaterializedConfiguration conf = new SimpleMaterializedConfiguration(); FlumeConfiguration fconfig = getFlumeConfiguration(); AgentConfiguration agentConf = fconfig.getConfigurationFor(getAgentName()); if (agentConf != null) { Map<String, ChannelComponent> channelComponentMap = Maps.newHashMap(); Map<String, SourceRunner> sourceRunnerMap = Maps.newHashMap(); Map<String, SinkRunner> sinkRunnerMap = Maps.newHashMap(); try { loadChannels(agentConf, channelComponentMap); loadSources(agentConf, channelComponentMap, sourceRunnerMap); loadSinks(agentConf, channelComponentMap, sinkRunnerMap); Set<String> channelNames = new HashSet<String>(channelComponentMap.keySet()); for (String channelName : channelNames) { ChannelComponent channelComponent = channelComponentMap.get(channelName); if (channelComponent.components.isEmpty()) { LOGGER.warn("Channel {} has no components connected" + " and has been removed.", channelName); channelComponentMap.remove(channelName); Map<String, Channel> nameChannelMap = channelCache.get(channelComponent.channel.getClass()); if (nameChannelMap != null) { nameChannelMap.remove(channelName); } } else { LOGGER.info("Channel {} connected to {}", channelName, channelComponent.components.toString()); conf.addChannel(channelName, channelComponent.channel); } } for (Map.Entry<String, SourceRunner> entry : sourceRunnerMap.entrySet()) { conf.addSourceRunner(entry.getKey(), entry.getValue()); } for (Map.Entry<String, SinkRunner> entry : sinkRunnerMap.entrySet()) { conf.addSinkRunner(entry.getKey(), entry.getValue()); } } catch (InstantiationException ex) { LOGGER.error("Failed to instantiate component", ex); } finally { channelComponentMap.clear(); sourceRunnerMap.clear(); sinkRunnerMap.clear(); } } else { LOGGER.warn("No configuration found for this host:{}", getAgentName()); } return conf; }
@Test public void testChannelThrowsExceptionDuringConfiguration() throws Exception { String agentName = "agent1"; String sourceType = "seq"; String channelType = UnconfigurableChannel.class.getName(); String sinkType = "null"; Map<String, String> properties = getProperties(agentName, sourceType, channelType, sinkType); MemoryConfigurationProvider provider = new MemoryConfigurationProvider(agentName, properties); MaterializedConfiguration config = provider.getConfiguration(); assertEquals(config.getSourceRunners().size(), 0); assertEquals(config.getChannels().size(), 0); assertEquals(config.getSinkRunners().size(), 0); }
static BayeuxClient createClient(final SalesforceComponent component, final SalesforceSession session) throws SalesforceException { // use default Jetty client from SalesforceComponent, it's shared by all consumers final SalesforceHttpClient httpClient = component.getConfig().getHttpClient(); Map<String, Object> options = new HashMap<>(); /* The timeout should be greater than 110 sec as per https://github.com/cometd/cometd/issues/1142#issuecomment-1048256297 and https://developer.salesforce.com/docs/atlas.en-us.api_streaming.meta/api_streaming/using_streaming_api_timeouts.htm */ options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, 120000); if (component.getLongPollingTransportProperties() != null) { options.putAll(component.getLongPollingTransportProperties()); } // check login access token if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) { session.login(null); } CookieStore cookieStore = new CookieManager().getCookieStore(); HttpCookieStore httpCookieStore = new HttpCookieStore.Default(); ClientTransport transport = new JettyHttpClientTransport(options, httpClient) { @Override protected void customize(Request request) { super.customize(request); //accessToken might be null due to lazy login String accessToken = session.getAccessToken(); if (accessToken == null) { try { accessToken = session.login(null); } catch (SalesforceException e) { throw new RuntimeException(e); } } String finalAccessToken = new String(accessToken); request.headers(h -> h.add(HttpHeader.AUTHORIZATION, "OAuth " + finalAccessToken)); } @Override protected void storeCookies(URI uri, Map<String, List<String>> cookies) { try { CookieManager cookieManager = new CookieManager(cookieStore, CookiePolicy.ACCEPT_ALL); cookieManager.put(uri, cookies); for (java.net.HttpCookie httpCookie : cookieManager.getCookieStore().getCookies()) { httpCookieStore.add(uri, HttpCookie.from(httpCookie)); } } catch (IOException x) { if (LOG.isDebugEnabled()) { LOG.debug("Could not parse cookies", x); } } } @Override protected HttpCookieStore getHttpCookieStore() { return httpCookieStore; } }; BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport); // added eagerly to check for support during handshake client.addExtension(REPLAY_EXTENSION); return client; }
@Test public void defaultLongPollingTimeoutShouldBeGreaterThanSalesforceTimeout() throws SalesforceException { var endpointConfig = new SalesforceEndpointConfig(); endpointConfig.setHttpClient(mock(SalesforceHttpClient.class)); var session = mock(SalesforceSession.class); var component = mock(SalesforceComponent.class); when(component.getLoginConfig()).thenReturn(new SalesforceLoginConfig()); when(component.getConfig()).thenReturn(endpointConfig); when(component.getSession()).thenReturn(session); var bayeuxClient = SubscriptionHelper.createClient(component, session); var longPollingTimeout = bayeuxClient.getTransport("long-polling").getOption(MAX_NETWORK_DELAY_OPTION); MatcherAssert.assertThat(longPollingTimeout, instanceOf(Integer.class)); MatcherAssert.assertThat((Integer) longPollingTimeout, greaterThan(110000)); }
public Cookie decode(String header) { final int headerLen = checkNotNull(header, "header").length(); if (headerLen == 0) { return null; } CookieBuilder cookieBuilder = null; loop: for (int i = 0;;) { // Skip spaces and separators. for (;;) { if (i == headerLen) { break loop; } char c = header.charAt(i); if (c == ',') { // Having multiple cookies in a single Set-Cookie header is // deprecated, modern browsers only parse the first one break loop; } else if (c == '\t' || c == '\n' || c == 0x0b || c == '\f' || c == '\r' || c == ' ' || c == ';') { i++; continue; } break; } int nameBegin = i; int nameEnd; int valueBegin; int valueEnd; for (;;) { char curChar = header.charAt(i); if (curChar == ';') { // NAME; (no value till ';') nameEnd = i; valueBegin = valueEnd = -1; break; } else if (curChar == '=') { // NAME=VALUE nameEnd = i; i++; if (i == headerLen) { // NAME= (empty value, i.e. nothing after '=') valueBegin = valueEnd = 0; break; } valueBegin = i; // NAME=VALUE; int semiPos = header.indexOf(';', i); valueEnd = i = semiPos > 0 ? semiPos : headerLen; break; } else { i++; } if (i == headerLen) { // NAME (no value till the end of string) nameEnd = headerLen; valueBegin = valueEnd = -1; break; } } if (valueEnd > 0 && header.charAt(valueEnd - 1) == ',') { // old multiple cookies separator, skipping it valueEnd--; } if (cookieBuilder == null) { // cookie name-value pair DefaultCookie cookie = initCookie(header, nameBegin, nameEnd, valueBegin, valueEnd); if (cookie == null) { return null; } cookieBuilder = new CookieBuilder(cookie, header); } else { // cookie attribute cookieBuilder.appendAttribute(nameBegin, nameEnd, valueBegin, valueEnd); } } return cookieBuilder != null ? cookieBuilder.cookie() : null; }
@Test public void testDecodingSingleCookieV1ExtraParamsIgnored() { String cookieString = "myCookie=myValue;max-age=50;path=/apathsomewhere;" + "domain=.adomainsomewhere;secure;comment=this is a comment;version=1;" + "commentURL=http://aurl.com;port='80,8080';discard;"; Cookie cookie = ClientCookieDecoder.STRICT.decode(cookieString); assertNotNull(cookie); assertEquals("myValue", cookie.value()); assertEquals(".adomainsomewhere", cookie.domain()); assertEquals(50, cookie.maxAge()); assertEquals("/apathsomewhere", cookie.path()); assertTrue(cookie.isSecure()); }
static Schema getSchema(Class<? extends Message> clazz) { return getSchema(ProtobufUtil.getDescriptorForClass(clazz)); }
@Test public void testNonContiguousOneOfSchema() { assertEquals( TestProtoSchemas.NONCONTIGUOUS_ONEOF_SCHEMA, ProtoSchemaTranslator.getSchema(Proto3SchemaMessages.NonContiguousOneOf.class)); }
public static String lastElement(List<String> strings) { checkArgument(!strings.isEmpty(), "empty list"); return strings.get(strings.size() - 1); }
@Test public void testLastElementEmpty() throws Throwable { intercept(IllegalArgumentException.class, () -> lastElement(new ArrayList<>(0))); }
@Override public String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index ) throws KettleDatabaseException { if ( dbMetaData == null ) { throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoDBMetaDataException" ) ); } if ( rsMetaData == null ) { throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoRSMetaDataException" ) ); } try { return rsMetaData.getColumnLabel( index ); } catch ( Exception e ) { throw new KettleDatabaseException( String.format( "%s: %s", BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameException" ), e.getMessage() ), e ); } }
@Test public void testGetLegacyColumnNameFieldNumber() throws Exception { assertEquals( "NUMBER", new MariaDBDatabaseMeta().getLegacyColumnName( mock( DatabaseMetaData.class ), getResultSetMetaData(), 1 ) ); }
@Override public void transform(Message message, DataType fromType, DataType toType) { final Optional<ValueRange> valueRange = getValueRangeBody(message); String range = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A:A").toString(); String majorDimension = message .getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_ROWS).toString(); String spreadsheetId = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", "").toString(); String[] columnNames = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "A").toString().split(","); boolean splitResults = Boolean .parseBoolean(message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "splitResults", "false").toString()); if (valueRange.isPresent()) { message.setBody( transformFromValueRangeModel(message, valueRange.get(), spreadsheetId, range, majorDimension, columnNames)); } else if (splitResults) { message.setBody(transformFromSplitValuesModel(message, spreadsheetId, range, majorDimension, columnNames)); } else { String valueInputOption = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "USER_ENTERED").toString(); message.setBody( transformToValueRangeModel(message, spreadsheetId, range, majorDimension, valueInputOption, columnNames)); } }
@Test public void testTransformToValueRangeColumnDimension() throws Exception { Exchange inbound = new DefaultExchange(camelContext); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A1:A2"); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_COLUMNS); String model = "{" + "\"spreadsheetId\": \"" + spreadsheetId + "\"," + "\"#1\": \"a1\"," + "\"#2\": \"a2\"" + "}"; inbound.getMessage().setBody(model); transformer.transform(inbound.getMessage(), DataType.ANY, DataType.ANY); Assertions.assertEquals(spreadsheetId, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.SPREADSHEET_ID)); Assertions.assertEquals("A1:A2", inbound.getMessage().getHeader(GoogleSheetsStreamConstants.RANGE)); Assertions.assertEquals(RangeCoordinate.DIMENSION_COLUMNS, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.MAJOR_DIMENSION)); Assertions.assertEquals("USER_ENTERED", inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption")); ValueRange valueRange = (ValueRange) inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "values"); Assertions.assertEquals(1L, valueRange.getValues().size()); Assertions.assertEquals("a1", valueRange.getValues().get(0).get(0)); Assertions.assertEquals("a2", valueRange.getValues().get(0).get(1)); }
public static QueryOptimizer newOptimizer(HazelcastProperties properties) { HazelcastProperty property = ClusterProperty.QUERY_OPTIMIZER_TYPE; String string = properties.getString(property); Type type; try { type = Type.valueOf(string); } catch (IllegalArgumentException e) { throw onInvalidOptimizerType(string); } switch (type) { case RULES: return new RuleBasedQueryOptimizer(); default: return new EmptyOptimizer(); } }
@Test public void newOptimizer_whenPropertyContainsRule_thenCreateRulesBasedOptimizer() { HazelcastProperties hazelcastProperties = createMockHazelcastProperties(QUERY_OPTIMIZER_TYPE, "RULES"); QueryOptimizer queryOptimizer = QueryOptimizerFactory.newOptimizer(hazelcastProperties); assertThat(queryOptimizer).isInstanceOf(RuleBasedQueryOptimizer.class); }
private static <T> Set<T> subtract(Set<T> set1, Set<T> set2) { return set1.stream() .filter(value -> !set2.contains(value)) .collect(toLinkedSet()); }
@Test public void testSubtract() { assertEquals(EquatableValueSet.all(TestingIdType.ID).subtract(EquatableValueSet.all(TestingIdType.ID)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.all(TestingIdType.ID).subtract(EquatableValueSet.none(TestingIdType.ID)), EquatableValueSet.all(TestingIdType.ID)); assertEquals(EquatableValueSet.all(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L)), EquatableValueSet.of(TestingIdType.ID, 0L).complement()); assertEquals(EquatableValueSet.all(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L)), EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()); assertEquals(EquatableValueSet.all(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()), EquatableValueSet.of(TestingIdType.ID, 0L, 1L)); assertEquals(EquatableValueSet.none(TestingIdType.ID).subtract(EquatableValueSet.all(TestingIdType.ID)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.none(TestingIdType.ID).subtract(EquatableValueSet.none(TestingIdType.ID)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.none(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.none(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.none(TestingIdType.ID).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.all(TestingIdType.ID)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.none(TestingIdType.ID)), EquatableValueSet.of(TestingIdType.ID, 0L)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 0L)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 0L).complement()), EquatableValueSet.of(TestingIdType.ID, 0L)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 1L)), EquatableValueSet.of(TestingIdType.ID, 0L)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 1L).complement()), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()), EquatableValueSet.of(TestingIdType.ID, 0L)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.all(TestingIdType.ID)), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.none(TestingIdType.ID)), EquatableValueSet.of(TestingIdType.ID, 0L).complement()); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 0L)), EquatableValueSet.of(TestingIdType.ID, 0L).complement()); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 0L).complement()), EquatableValueSet.none(TestingIdType.ID)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 1L)), EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 1L).complement()), EquatableValueSet.of(TestingIdType.ID, 1L)); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L)), EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()); assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).complement().subtract(EquatableValueSet.of(TestingIdType.ID, 0L, 1L).complement()), EquatableValueSet.of(TestingIdType.ID, 1L)); }
public static SchemaAndValue parseString(String value) { if (value == null) { return NULL_SCHEMA_AND_VALUE; } if (value.isEmpty()) { return new SchemaAndValue(Schema.STRING_SCHEMA, value); } ValueParser parser = new ValueParser(new Parser(value)); return parser.parse(false); }
@Test public void shouldParseFalseAsBooleanIfSurroundedByWhitespace() { SchemaAndValue schemaAndValue = Values.parseString(WHITESPACE + "false" + WHITESPACE); assertEquals(Type.BOOLEAN, schemaAndValue.schema().type()); assertEquals(false, schemaAndValue.value()); }
public long readWPLong() throws IOException { int ch1 = in.read(); int ch2 = in.read(); int ch3 = in.read(); int ch4 = in.read(); if ((ch1 | ch2 | ch3 | ch4) < 0) { throw new EOFException(); } return (((long) ch4 << 24) + (ch3 << 16) + (ch2 << 8) + ch1); }
@Test public void testReadLong() throws Exception { try (WPInputStream wpInputStream = emptyWPStream()) { wpInputStream.readWPLong(); fail("should have thrown EOF"); } catch (EOFException e) { //swallow } }
public URL addParameters(Map<String, String> parameters) { URLParam newParam = urlParam.addParameters(parameters); return returnURL(newParam); }
@Test void testAddParameters() throws Exception { URL url = URL.valueOf("dubbo://127.0.0.1:20880"); assertURLStrDecoder(url); Map<String, String> parameters = new HashMap<String, String>(); parameters.put("version", null); url.addParameters(parameters); assertURLStrDecoder(url); }
static public boolean areOnSameFileStore(File a, File b) throws RolloverFailure { if (!a.exists()) { throw new IllegalArgumentException("File [" + a + "] does not exist."); } if (!b.exists()) { throw new IllegalArgumentException("File [" + b + "] does not exist."); } // Compare the FileStore of each path to determine whether both files reside on the same volume try { Path pathA = a.toPath(); Path pathB = b.toPath(); FileStore fileStoreA = Files.getFileStore(pathA); FileStore fileStoreB = Files.getFileStore(pathB); return fileStoreA.equals(fileStoreB); } catch (Exception e) { throw new RolloverFailure("Failed to check file store equality for [" + a + "] and [" + b + "]", e); } }
@Disabled @Test public void manual_filesOnDifferentVolumesShouldBeDetectedAsSuch() throws RolloverFailure { if (!EnvUtil.isJDK7OrHigher()) return; // author's computer has two volumes File c = new File("c:/tmp/"); File d = new File("d:/"); assertFalse(FileStoreUtil.areOnSameFileStore(c, d)); }
@Override public RelativeRange apply(final Period period) { if (period != null) { return RelativeRange.Builder.builder() .from(period.withYears(0).withMonths(0).plusDays(period.getYears() * 365).plusDays(period.getMonths() * 30).toStandardSeconds().getSeconds()) .build(); } else { return null; } }
@Test void testYearsMonthsPeriodConversion() { final RelativeRange result = converter.apply(Period.years(5).plusMonths(1)); verifyResult(result, (5 * 365 + 1 * 30) * 24 * 60 * 60); }
public static void main(String[] args) throws Exception { Options cliOptions = CliFrontendOptions.initializeOptions(); CommandLineParser parser = new DefaultParser(); CommandLine commandLine = parser.parse(cliOptions, args); // Help message if (args.length == 0 || commandLine.hasOption(CliFrontendOptions.HELP)) { HelpFormatter formatter = new HelpFormatter(); formatter.setLeftPadding(4); formatter.setWidth(80); formatter.printHelp(" ", cliOptions); return; } // Create executor and execute the pipeline PipelineExecution.ExecutionInfo result = createExecutor(commandLine).run(); // Print execution result printExecutionInfo(result); }
@Test void testMissingFlinkHome() { assertThatThrownBy(() -> CliFrontend.main(new String[] {pipelineDef()})) .isInstanceOf(IllegalArgumentException.class) .hasMessage( "Cannot find Flink home from either command line arguments \"--flink-home\" " + "or the environment variable \"FLINK_HOME\". " + "Please make sure Flink home is properly set. "); }
@Override public ExecuteResult execute(final ServiceContext serviceContext, final ConfiguredKsqlPlan plan, final boolean restoreInProgress) { try { final ExecuteResult result = EngineExecutor .create(primaryContext, serviceContext, plan.getConfig()) .execute(plan.getPlan(), restoreInProgress); return result; } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { // add the statement text to the KsqlException throw new KsqlStatementException( e.getMessage(), e.getMessage(), plan.getPlan().getStatementText(), e.getCause() ); } }
@Test public void shouldNotCleanUpInternalTopicsOnReplace() { // Given: setupKsqlEngineWithSharedRuntimeEnabled(); KsqlEngineTestUtil.execute( serviceContext, ksqlEngine, "create stream s1 with (value_format = 'avro') as select * from test1;", ksqlConfig, Collections.emptyMap() ); // When: KsqlEngineTestUtil.execute( serviceContext, ksqlEngine, "create or replace stream s1 with (value_format = 'avro') as select *, 'foo' from test1;", ksqlConfig, Collections.emptyMap() ); // Then: awaitCleanupComplete(); verify(topicClient, never()).deleteInternalTopics(any()); }
public static int registerSchema( final SchemaRegistryClient srClient, final ParsedSchema parsedSchema, final String topic, final String subject, final boolean isKey ) throws KsqlSchemaAuthorizationException, KsqlException { try { if (parsedSchema instanceof ProtobufSchema) { final ProtobufSchema resolved = AbstractKafkaProtobufSerializer.resolveDependencies( srClient, true, false, true, null, new DefaultReferenceSubjectNameStrategy(), topic, isKey, (ProtobufSchema) parsedSchema ); return srClient.register(subject, resolved); } else { return srClient.register(subject, parsedSchema); } } catch (IOException | RestClientException e) { if (SchemaRegistryUtil.isAuthErrorCode(e)) { final AclOperation deniedOperation = SchemaRegistryUtil.getDeniedOperation(e.getMessage()); if (deniedOperation != AclOperation.UNKNOWN) { throw new KsqlSchemaAuthorizationException( deniedOperation, subject); } } throw new KsqlException("Could not register schema for topic: " + e.getMessage(), e); } }
@Test public void shouldRegisterAvroSchema() throws Exception { // Given: when(schemaRegistryClient.register(any(), any(AvroSchema.class))).thenReturn(1); // When: SchemaRegistryUtil.registerSchema(schemaRegistryClient, AVRO_SCHEMA, "topic", "subject", false); // Then: verify(schemaRegistryClient, times(1)).register("subject", AVRO_SCHEMA); }
public List<KiePMMLSegment> getSegments() { return segments; }
@Test void getSegments() { assertThat(KIE_PMML_SEGMENTATION.getSegments()).isNull(); final List<KiePMMLSegment> segments = getKiePMMLSegments(); KIE_PMML_SEGMENTATION = BUILDER.withSegments(segments).build(); assertThat(KIE_PMML_SEGMENTATION.getSegments()).isEqualTo(segments); }
@Override @CheckForNull public String message(Locale locale, String key, @Nullable String defaultValue, Object... parameters) { String bundleKey = propertyToBundles.get(key); String value = null; if (bundleKey != null) { try { ResourceBundle resourceBundle = ResourceBundle.getBundle(bundleKey, locale, classloader, control); value = resourceBundle.getString(key); } catch (MissingResourceException e1) { // ignore } } if (value == null) { value = defaultValue; } return formatMessage(value, parameters); }
@Test public void format_message_with_parameters() { assertThat(underTest.message(Locale.ENGLISH, "x_results", null, "10")).isEqualTo("10 results"); }
public static void tryShutdown(HazelcastInstance hazelcastInstance) { if (hazelcastInstance == null) { return; } HazelcastInstanceImpl factory = (HazelcastInstanceImpl) hazelcastInstance; closeSockets(factory); try { factory.node.shutdown(true); } catch (Throwable ignored) { ignore(ignored); } }
@Test public void testTryShutdown() { tryShutdown(hazelcastInstance); }
@Override public void finish() { finish(0L); }
@Test void finished_client_annotation() { finish("cs", "cr", Kind.CLIENT); }
public static <T> boolean isPrimitiveType(@Nonnull final Class<T> source) { Objects.requireNonNull(source); if(WRAPPER_TYPES.contains(source)) { return true; } return source.isPrimitive(); }
@SuppressWarnings("DataFlowIssue") @Test void isPrimitiveType() { Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Integer.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Byte.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Character.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Boolean.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Double.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Float.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Long.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Short.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(String.class)); Assertions.assertTrue(DataTypeUtil.isPrimitiveType(Void.class)); Assertions.assertFalse(DataTypeUtil.isPrimitiveType(Object.class)); Assertions.assertFalse(DataTypeUtil.isPrimitiveType(Object[].class)); Assertions.assertThrows(NullPointerException.class, () -> DataTypeUtil.isPrimitiveType(null)); }
@Override public void pluginJarAdded(BundleOrPluginFileDetails bundleOrPluginFileDetails) { final GoPluginBundleDescriptor bundleDescriptor = goPluginBundleDescriptorBuilder.build(bundleOrPluginFileDetails); try { LOGGER.info("Plugin load starting: {}", bundleOrPluginFileDetails.file()); validateIfExternalPluginRemovingBundledPlugin(bundleDescriptor); validatePluginCompatibilityWithCurrentOS(bundleDescriptor); validatePluginCompatibilityWithGoCD(bundleDescriptor); addPlugin(bundleOrPluginFileDetails, bundleDescriptor); } finally { LOGGER.info("Plugin load finished: {}", bundleOrPluginFileDetails.file()); } }
@Test void shouldLoadAPluginWhenAListOfTargetOSesIsNotDeclaredByThePluginInItsXML() throws Exception { File pluginJarFile = new File(pluginWorkDir, PLUGIN_JAR_FILE_NAME); copyPluginToTheDirectory(pluginWorkDir, PLUGIN_JAR_FILE_NAME); GoPluginBundleDescriptor descriptor = new GoPluginBundleDescriptor(getPluginDescriptor("some.old.id", "1.0", pluginJarFile.getAbsolutePath(), new File(PLUGIN_JAR_FILE_NAME), false, null)); when(systemEnvironment.getOperatingSystemFamilyJvmName()).thenReturn("Windows"); when(goPluginBundleDescriptorBuilder.build(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir))).thenReturn(descriptor); listener = new DefaultPluginJarChangeListener(registry, osgiManifestGenerator, pluginLoader, goPluginBundleDescriptorBuilder, systemEnvironment); listener.pluginJarAdded(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir)); verify(registry, times(1)).loadPlugin(descriptor); verify(pluginLoader, times(1)).loadPlugin(descriptor); }
@Override protected Mono<Boolean> doMatcher(final ServerWebExchange exchange, final WebFilterChain chain) { String path = exchange.getRequest().getURI().getRawPath(); return Mono.just(paths.contains(path)); }
@Test public void testDoNotMatcher() { ServerWebExchange webExchange = MockServerWebExchange.from(MockServerHttpRequest .post("http://localhost:8080/")); Mono<Boolean> filter = excludeFilter.doMatcher(webExchange, webFilterChain); StepVerifier.create(filter).expectNext(Boolean.FALSE).verifyComplete(); }
@POST @Consumes(MediaType.APPLICATION_JSON) @Path("{component}") public Response setConfigs(@PathParam("component") String component, @DefaultValue("false") @QueryParam("preset") boolean preset, InputStream request) throws IOException { ComponentConfigService service = get(ComponentConfigService.class); ObjectNode props = readTreeFromStream(mapper(), request); List<String> errorMsgs = new ArrayList<String>(); if (preset) { props.fieldNames().forEachRemaining(k -> { try { service.preSetProperty(component, k, props.path(k).asText()); } catch (IllegalArgumentException e) { errorMsgs.add(e.getMessage()); } }); } else { props.fieldNames().forEachRemaining(k -> { try { service.setProperty(component, k, props.path(k).asText()); } catch (IllegalArgumentException e) { errorMsgs.add(e.getMessage()); } }); } if (!errorMsgs.isEmpty()) { return Response.status(MULTI_STATUS_RESPONE).entity(produceErrorJson(errorMsgs)).build(); } return Response.ok().build(); }
@Test public void setConfigs() { WebTarget wt = target(); try { wt.path("configuration/foo").request().post( Entity.json("{ \"k\" : \"v\" }"), String.class); } catch (BadRequestException e) { assertEquals("incorrect key", "foo", service.component); assertEquals("incorrect key", "k", service.name); assertEquals("incorrect value", "v", service.value); } }
@Override public CeTaskResult getResult() { checkState(this.result != null, "No CeTaskResult has been set in the holder"); return this.result; }
@Test public void getResult_throws_ISE_if_no_CeTaskResult_is_set() { assertThatThrownBy(() -> underTest.getResult()) .isInstanceOf(IllegalStateException.class) .hasMessage("No CeTaskResult has been set in the holder"); }
static <ID, T> TaskExecutors<ID, T> singleItemExecutors(final String name, int workerCount, final TaskProcessor<T> processor, final AcceptorExecutor<ID, T> acceptorExecutor) { final AtomicBoolean isShutdown = new AtomicBoolean(); final TaskExecutorMetrics metrics = new TaskExecutorMetrics(name); registeredMonitors.put(name, metrics); return new TaskExecutors<>(idx -> new SingleTaskWorkerRunnable<>("TaskNonBatchingWorker-" + name + '-' + idx, isShutdown, metrics, processor, acceptorExecutor), workerCount, isShutdown); }
@Test public void testSingleItemProcessingWithTransientError() throws Exception { taskExecutors = TaskExecutors.singleItemExecutors("TEST", 1, processor, acceptorExecutor); TaskHolder<Integer, ProcessingResult> taskHolder = transientErrorTaskHolder(1); taskQueue.add(taskHolder); // Verify that the transient task will be re-scheduled processor.expectTransientErrors(1); verify(acceptorExecutor, timeout(500).times(1)).reprocess(taskHolder, ProcessingResult.TransientError); }
@Override public void updateCategory(ProductCategorySaveReqVO updateReqVO) { // Validate that the category exists validateProductCategoryExists(updateReqVO.getId()); // Validate that the parent category exists validateParentProductCategory(updateReqVO.getParentId()); // Perform the update ProductCategoryDO updateObj = BeanUtils.toBean(updateReqVO, ProductCategoryDO.class); productCategoryMapper.updateById(updateObj); }
@Test public void testUpdateCategory_success() { // mock data ProductCategoryDO dbCategory = randomPojo(ProductCategoryDO.class); productCategoryMapper.insert(dbCategory); // @Sql: insert an existing record first // prepare parameters ProductCategorySaveReqVO reqVO = randomPojo(ProductCategorySaveReqVO.class, o -> { o.setId(dbCategory.getId()); // set the ID of the record to update }); // mock the parent category ProductCategoryDO parentProductCategory = randomPojo(ProductCategoryDO.class, o -> o.setId(reqVO.getParentId())); productCategoryMapper.insert(parentProductCategory); // invoke productCategoryService.updateCategory(reqVO); // verify the update is correct ProductCategoryDO category = productCategoryMapper.selectById(reqVO.getId()); // fetch the latest record assertPojoEquals(reqVO, category); }
public static List<String> splitMarkdownParagraphs( List<String> lines, int maxTokensPerParagraph) { return internalSplitTextParagraphs( lines, maxTokensPerParagraph, (text) -> internalSplitLines( text, maxTokensPerParagraph, false, s_markdownSplitOptions)); }
@Test public void canSplitMarkdownParagraphsOnNewlines() { List<String> input = Arrays.asList( "This_is_a_test_of_the_emergency_broadcast_system\r\nThis_is_only_a_test", "We_repeat_this_is_only_a_test\nA_unit_test", "A_small_note\n" + "And_another\r\n" + "And_once_again\r" + "Seriously_this_is_the_end\n" + "We're_finished\n" + "All_set\n" + "Bye\n", "Done"); List<String> expected = Arrays.asList( "This_is_a_test_of_the_emergency_broadcast_system", "This_is_only_a_test", "We_repeat_this_is_only_a_test\nA_unit_test", "A_small_note\nAnd_another\nAnd_once_again", "Seriously_this_is_the_end\nWe're_finished\nAll_set\nBye Done"); List<String> result = TextChunker.splitMarkdownParagraphs(input, 15); Assertions.assertEquals(expected, result); }
public boolean isBeforeOrAt(KinesisRecord other) { if (shardIteratorType == AT_TIMESTAMP) { return timestamp.compareTo(other.getApproximateArrivalTimestamp()) <= 0; } int result = extendedSequenceNumber().compareTo(other.getExtendedSequenceNumber()); if (result == 0) { return shardIteratorType == AT_SEQUENCE_NUMBER; } return result < 0; }
@Test public void testComparisonWithExtendedSequenceNumber() { assertThat( new ShardCheckpoint("", "", new StartingPoint(LATEST)) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isTrue(); assertThat( new ShardCheckpoint("", "", new StartingPoint(TRIM_HORIZON)) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isTrue(); assertThat( checkpoint(AFTER_SEQUENCE_NUMBER, "10", 1L) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isTrue(); assertThat( checkpoint(AT_SEQUENCE_NUMBER, "100", 0L) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isTrue(); assertThat( checkpoint(AFTER_SEQUENCE_NUMBER, "100", 0L) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isFalse(); assertThat( checkpoint(AT_SEQUENCE_NUMBER, "100", 1L) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("100", 0L)))) .isFalse(); assertThat( checkpoint(AFTER_SEQUENCE_NUMBER, "100", 0L) .isBeforeOrAt(recordWith(new ExtendedSequenceNumber("99", 1L)))) .isFalse(); }
public MutableRecordBatch nextBatch() { int remaining = buffer.remaining(); Integer batchSize = nextBatchSize(); if (batchSize == null || remaining < batchSize) return null; byte magic = buffer.get(buffer.position() + MAGIC_OFFSET); ByteBuffer batchSlice = buffer.slice(); batchSlice.limit(batchSize); buffer.position(buffer.position() + batchSize); if (magic > RecordBatch.MAGIC_VALUE_V1) return new DefaultRecordBatch(batchSlice); else return new AbstractLegacyRecordBatch.ByteBufferLegacyRecordBatch(batchSlice); }
@Test public void iteratorRaisesOnTooSmallRecords() { ByteBuffer buffer = ByteBuffer.allocate(1024); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, Compression.NONE, TimestampType.CREATE_TIME, 0L); builder.append(15L, "a".getBytes(), "1".getBytes()); builder.append(20L, "b".getBytes(), "2".getBytes()); builder.close(); int position = buffer.position(); builder = MemoryRecords.builder(buffer, Compression.NONE, TimestampType.CREATE_TIME, 2L); builder.append(30L, "c".getBytes(), "3".getBytes()); builder.append(40L, "d".getBytes(), "4".getBytes()); builder.close(); buffer.flip(); buffer.putInt(position + DefaultRecordBatch.LENGTH_OFFSET, 9); ByteBufferLogInputStream logInputStream = new ByteBufferLogInputStream(buffer, Integer.MAX_VALUE); assertNotNull(logInputStream.nextBatch()); assertThrows(CorruptRecordException.class, logInputStream::nextBatch); }
@Override public DatabaseMetaData getMetaData() { return new CircuitBreakerDatabaseMetaData(); }
@Test void assertGetMetaData() { assertThat(connection.getMetaData(), instanceOf(CircuitBreakerDatabaseMetaData.class)); }
@Override public void loginSuccess(HttpRequest request, @Nullable String login, Source source) { checkRequest(request); requireNonNull(source, "source can't be null"); LOGGER.atDebug().setMessage("login success [method|{}][provider|{}|{}][IP|{}|{}][login|{}]") .addArgument(source::getMethod) .addArgument(source::getProvider) .addArgument(source::getProviderName) .addArgument(request::getRemoteAddr) .addArgument(() -> getAllIps(request)) .addArgument(() -> preventLogFlood(sanitizeLog(emptyIfNull(login)))) .log(); }
@Test public void login_success_prevents_log_flooding_on_login_starting_from_128_chars() { underTest.loginSuccess(mockRequest(), LOGIN_129_CHARS, Source.realm(Method.BASIC, "some provider name")); verifyLog("login success [method|BASIC][provider|REALM|some provider name][IP||][login|012345678901234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789012345678901234567...(129)]", Set.of("logout", "login failure")); }
public boolean isSupported(final SQLStatement sqlStatement) { for (Class<? extends SQLStatement> each : supportedSQLStatements) { if (each.isAssignableFrom(sqlStatement.getClass())) { return true; } } for (Class<? extends SQLStatement> each : unsupportedSQLStatements) { if (each.isAssignableFrom(sqlStatement.getClass())) { return false; } } return true; }
@Test void assertIsNotSupportedWithInUnsupportedList() { assertFalse(new SQLSupportedJudgeEngine(Collections.emptyList(), Collections.singleton(SelectStatement.class)).isSupported(mock(SelectStatement.class))); }
@Scheduled(fixedDelay = 3000) public void send() { CarDto carDto = CarDto.builder() .id(UUID.randomUUID()) .color("white") .name("vw") .build(); RegistrationDto registrationDto = template.convertSendAndReceiveAsType( directExchange.getName(), ROUTING_KEY, carDto, new ParameterizedTypeReference<>() { }); }
@Test void sendMessageSynchronously() { // given // when ThrowableAssert.ThrowingCallable send = () -> statefulBlockingClient.send(); // then assertThatCode(send).doesNotThrowAnyException(); }
@VisibleForTesting static void instantiateMetaspaceMemoryMetrics(final MetricGroup parentMetricGroup) { final List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans().stream() .filter(bean -> "Metaspace".equals(bean.getName())) .collect(Collectors.toList()); if (memoryPoolMXBeans.isEmpty()) { LOG.info( "The '{}' metrics will not be exposed because no pool named 'Metaspace' could be found. This might be caused by the used JVM.", METRIC_GROUP_METASPACE_NAME); return; } final MetricGroup metricGroup = parentMetricGroup.addGroup(METRIC_GROUP_METASPACE_NAME); final Iterator<MemoryPoolMXBean> beanIterator = memoryPoolMXBeans.iterator(); final MemoryPoolMXBean firstPool = beanIterator.next(); instantiateMemoryUsageMetrics(metricGroup, firstPool::getUsage); if (beanIterator.hasNext()) { LOG.debug( "More than one memory pool named 'Metaspace' is present. Only the first pool was used for instantiating the '{}' metrics.", METRIC_GROUP_METASPACE_NAME); } }
@Test void testMetaspaceCompleteness() { assertThat(hasMetaspaceMemoryPool()) .withFailMessage("Requires JVM with Metaspace memory pool") .isTrue(); final InterceptingOperatorMetricGroup metaspaceMetrics = new InterceptingOperatorMetricGroup() { @Override public MetricGroup addGroup(String name) { return this; } }; MetricUtils.instantiateMetaspaceMemoryMetrics(metaspaceMetrics); assertThat(metaspaceMetrics.get(MetricNames.MEMORY_USED)).isNotNull(); assertThat(metaspaceMetrics.get(MetricNames.MEMORY_COMMITTED)).isNotNull(); assertThat(metaspaceMetrics.get(MetricNames.MEMORY_MAX)).isNotNull(); }
@Override public byte[] getBytes(final int columnIndex) throws SQLException { return (byte[]) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, byte[].class), byte[].class); }
@Test void assertGetBytesWithColumnIndex() throws SQLException { when(mergeResultSet.getValue(1, byte[].class)).thenReturn(new byte[]{(byte) 1}); assertThat(shardingSphereResultSet.getBytes(1), is(new byte[]{(byte) 1})); }
@Override public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { synchronized (getClassLoadingLock(name)) { Class<?> loadedClass = findLoadedClass(name); if (loadedClass != null) { return loadedClass; } if (isClosed) { throw new ClassNotFoundException("This ClassLoader is closed"); } if (config.shouldAcquire(name)) { loadedClass = PerfStatsCollector.getInstance() .measure("load sandboxed class", () -> maybeInstrumentClass(name)); } else { loadedClass = getParent().loadClass(name); } if (resolve) { resolveClass(loadedClass); } return loadedClass; } }
@Test public void directlyCallingNativeMethodShouldBeNoOp() throws Exception { Class<?> exampleClass = loadClass(AClassWithNativeMethod.class); Object exampleInstance = exampleClass.getDeclaredConstructor().newInstance(); Method directMethod = findDirectMethod(exampleClass, "nativeMethod", String.class, int.class); assertThat(Modifier.isNative(directMethod.getModifiers())).isFalse(); assertThat(directMethod.invoke(exampleInstance, "", 1)).isNull(); }
@Udf public <T extends Comparable<? super T>> T arrayMin(@UdfParameter( description = "Array of values from which to find the minimum") final List<T> input) { if (input == null) { return null; } T candidate = null; for (T thisVal : input) { if (thisVal != null) { if (candidate == null) { candidate = thisVal; } else if (thisVal.compareTo(candidate) < 0) { candidate = thisVal; } } } return candidate; }
@Test public void shouldFindIntMin() { final List<Integer> input = Arrays.asList(1, 3, -2); assertThat(udf.arrayMin(input), is(-2)); }
@Override public boolean decide(final SelectStatementContext selectStatementContext, final List<Object> parameters, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final ShardingRule rule, final Collection<DataNode> includedDataNodes) { Collection<String> tableNames = rule.getShardingLogicTableNames(selectStatementContext.getTablesContext().getTableNames()); if (tableNames.isEmpty()) { return false; } includedDataNodes.addAll(getTableDataNodes(rule, tableNames, database)); if (selectStatementContext.isContainsSubquery() || selectStatementContext.isContainsHaving() || selectStatementContext.isContainsCombine() || selectStatementContext.isContainsPartialDistinctAggregation()) { return true; } if (!selectStatementContext.isContainsJoinQuery() || rule.isAllTablesInSameDataSource(tableNames)) { return false; } if (1 == tableNames.size() && selectStatementContext.isContainsJoinQuery() && !rule.isAllBindingTables(database, selectStatementContext, tableNames)) { return true; } return tableNames.size() > 1 && !rule.isAllBindingTables(database, selectStatementContext, tableNames); }
@Test void assertDecideWhenContainsCombine() { SelectStatementContext select = createStatementContext(); when(select.isContainsCombine()).thenReturn(true); Collection<DataNode> includedDataNodes = new HashSet<>(); ShardingRule shardingRule = createShardingRule(); assertTrue(new ShardingSQLFederationDecider().decide(select, Collections.emptyList(), mock(RuleMetaData.class), createDatabase(shardingRule), shardingRule, includedDataNodes)); assertThat(includedDataNodes.size(), is(4)); }
@Override public CompletableFuture<List<Long>> getSplitBoundary(BundleSplitOption bundleSplitOption) { NamespaceService service = bundleSplitOption.getService(); NamespaceBundle bundle = bundleSplitOption.getBundle(); List<Long> positions = bundleSplitOption.getPositions(); if (positions == null || positions.size() == 0) { throw new IllegalArgumentException("SplitBoundaries can't be empty"); } // sort all positions Collections.sort(positions); if (force) { return getBoundaries(bundle, positions); } else { return service.getOwnedTopicListForNamespaceBundle(bundle) .thenCompose(topics -> { if (topics == null || topics.size() <= 1) { return CompletableFuture.completedFuture(null); } return getBoundaries(bundle, positions); }); } }
@Test public void testEmptyTopicWithForce() { SpecifiedPositionsBundleSplitAlgorithm algorithm = new SpecifiedPositionsBundleSplitAlgorithm(true); NamespaceService mockNamespaceService = mock(NamespaceService.class); NamespaceBundle mockNamespaceBundle = mock(NamespaceBundle.class); doReturn(1L).when(mockNamespaceBundle).getLowerEndpoint(); doReturn(1000L).when(mockNamespaceBundle).getUpperEndpoint(); doReturn(CompletableFuture.completedFuture(List.of())) .when(mockNamespaceService).getOwnedTopicListForNamespaceBundle(mockNamespaceBundle); List<Long> splitPositions = algorithm.getSplitBoundary(new BundleSplitOption(mockNamespaceService, mockNamespaceBundle, Arrays.asList(1L, 2L))).join(); assertEquals(splitPositions, Arrays.asList(2L)); }
@Override public boolean reveal(final Local file, final boolean select) { synchronized(NSWorkspace.class) { // If a second path argument is specified, a new file viewer is opened. If you specify an // empty string (@"") for this parameter, the file is selected in the main viewer. if(select) { return workspace.selectFile(new NFDNormalizer().normalize(file.getAbsolute()).toString(), StringUtils.EMPTY); } else { if(file.isFile()) { return new WorkspaceApplicationLauncher().open(file); } else { return workspace.selectFile(null, new NFDNormalizer().normalize(file.getAbsolute()).toString()); } } } }
@Test public void testReveal() { assertTrue(new WorkspaceRevealService().reveal(new Local(System.getProperty("java.io.tmpdir")))); }
public V computeIfAbsent(K key, Function<K, V> provider) { requireNonNull(key); requireNonNull(provider); long h = hash(key); return getSection(h).put(key, null, (int) h, true, provider); }
@Test public void testComputeIfAbsent() { ConcurrentOpenHashMap<Integer, Integer> map = ConcurrentOpenHashMap.<Integer, Integer>newBuilder() .expectedItems(16) .concurrencyLevel(1) .build(); AtomicInteger counter = new AtomicInteger(); Function<Integer, Integer> provider = key -> counter.getAndIncrement(); assertEquals(map.computeIfAbsent(0, provider).intValue(), 0); assertEquals(map.get(0).intValue(), 0); assertEquals(map.computeIfAbsent(1, provider).intValue(), 1); assertEquals(map.get(1).intValue(), 1); assertEquals(map.computeIfAbsent(1, provider).intValue(), 1); assertEquals(map.get(1).intValue(), 1); assertEquals(map.computeIfAbsent(2, provider).intValue(), 2); assertEquals(map.get(2).intValue(), 2); }
@VisibleForTesting static Comparator<ActualProperties> streamingExecutionPreference(PreferredProperties preferred) { // Calculating the matches can be a bit expensive, so cache the results between comparisons LoadingCache<List<LocalProperty<VariableReferenceExpression>>, List<Optional<LocalProperty<VariableReferenceExpression>>>> matchCache = CacheBuilder.newBuilder() .build(CacheLoader.from(actualProperties -> LocalProperties.match(actualProperties, preferred.getLocalProperties()))); return (actual1, actual2) -> { List<Optional<LocalProperty<VariableReferenceExpression>>> matchLayout1 = matchCache.getUnchecked(actual1.getLocalProperties()); List<Optional<LocalProperty<VariableReferenceExpression>>> matchLayout2 = matchCache.getUnchecked(actual2.getLocalProperties()); return ComparisonChain.start() .compareTrueFirst(hasLocalOptimization(preferred.getLocalProperties(), matchLayout1), hasLocalOptimization(preferred.getLocalProperties(), matchLayout2)) .compareTrueFirst(meetsPartitioningRequirements(preferred, actual1), meetsPartitioningRequirements(preferred, actual2)) .compare(matchLayout1, matchLayout2, matchedLayoutPreference()) .result(); }; }
@Test public void testPickLayoutPartitionedOnMultiple() { Comparator<ActualProperties> preference = streamingExecutionPreference( PreferredProperties.partitioned(ImmutableSet.of(variable("a"), variable("b")))); List<ActualProperties> input = ImmutableList.<ActualProperties>builder() .add(builder() .global(streamPartitionedOn("a")) .build()) .add(builder() .global(singleStreamPartition()) .build()) .add(builder() .global(arbitraryPartition()) .local(ImmutableList.of(grouped("a", "b"))) .build()) .add(builder() .global(arbitraryPartition()) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .add(builder() .global(singleStream()) .local(ImmutableList.of(constant("a"), sorted("b", ASC_NULLS_FIRST))) .build()) .add(builder() .global(singleStreamPartition()) .local(ImmutableList.of(sorted("a", ASC_NULLS_FIRST))) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .build(); List<ActualProperties> expected = ImmutableList.<ActualProperties>builder() .add(builder() .global(streamPartitionedOn("a")) .build()) .add(builder() .global(singleStreamPartition()) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .add(builder() .global(singleStream()) .local(ImmutableList.of(constant("a"), sorted("b", ASC_NULLS_FIRST))) .build()) .add(builder() .global(singleStreamPartition()) .local(ImmutableList.of(sorted("a", ASC_NULLS_FIRST))) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .add(builder() .global(arbitraryPartition()) .local(ImmutableList.of(grouped("a", "b"))) .build()) .add(builder() .global(arbitraryPartition()) .build()) .build(); assertEquals(stableSort(input, preference), expected); }
@Override public LogicalSchema getSchema() { return schema; }
@Test public void shouldHaveFullyQualifiedWindowedSchema() { // Given: givenWindowedSource(true); givenNodeWithMockSource(); // When: final LogicalSchema schema = node.getSchema(); // Then: assertThat(schema, is(REAL_SCHEMA.withPseudoAndKeyColsInValue(true))); }