Dataset columns: focal_method (string, lengths 13–60.9k) · test_case (string, lengths 25–109k)
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { try { final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(directory).getName()); final AttributedList<Path> children = new AttributedList<>(); ResultContinuation token = null; ResultSegment<ListBlobItem> result; String prefix = StringUtils.EMPTY; if(!containerService.isContainer(directory)) { prefix = containerService.getKey(directory); if(!prefix.endsWith(String.valueOf(Path.DELIMITER))) { prefix += Path.DELIMITER; } } boolean hasDirectoryPlaceholder = containerService.isContainer(directory); do { final BlobRequestOptions options = new BlobRequestOptions(); result = container.listBlobsSegmented(prefix, false, EnumSet.noneOf(BlobListingDetails.class), new HostPreferences(session.getHost()).getInteger("azure.listing.chunksize"), token, options, context); for(ListBlobItem object : result.getResults()) { if(new SimplePathPredicate(new Path(object.getUri().getPath(), EnumSet.of(Path.Type.directory))).test(directory)) { if(log.isDebugEnabled()) { log.debug(String.format("Skip placeholder key %s", object)); } hasDirectoryPlaceholder = true; continue; } final PathAttributes attributes = new PathAttributes(); if(object instanceof CloudBlob) { final CloudBlob blob = (CloudBlob) object; attributes.setSize(blob.getProperties().getLength()); attributes.setModificationDate(blob.getProperties().getLastModified().getTime()); attributes.setETag(blob.getProperties().getEtag()); if(StringUtils.isNotBlank(blob.getProperties().getContentMD5())) { attributes.setChecksum(Checksum.parse(Hex.encodeHexString(Base64.decodeBase64(blob.getProperties().getContentMD5())))); } } // A directory is designated by a delimiter character. final EnumSet<Path.Type> types = object instanceof CloudBlobDirectory ? EnumSet.of(Path.Type.directory, Path.Type.placeholder) : EnumSet.of(Path.Type.file); final Path child = new Path(directory, PathNormalizer.name(object.getUri().getPath()), types, attributes); children.add(child); } listener.chunk(directory, children); token = result.getContinuationToken(); } while(result.getHasMoreResults()); if(!hasDirectoryPlaceholder && children.isEmpty()) { if(log.isWarnEnabled()) { log.warn(String.format("No placeholder found for directory %s", directory)); } throw new NotfoundException(directory.getAbsolute()); } return children; } catch(StorageException e) { throw new AzureExceptionMappingService().map("Listing directory {0} failed", e, directory); } catch(URISyntaxException e) { throw new NotfoundException(e.getMessage(), e); } }
@Test(expected = NotfoundException.class) public void testListNotFoundFolder() throws Exception { final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); new AzureObjectListService(session, null).list(new Path(container, "notfound", EnumSet.of(Path.Type.directory)), new DisabledListProgressListener()); }
void placeOrder(Order order) { sendShippingRequest(order); }
@Test void testPlaceOrderNoExceptionShortMsgDuration() throws Exception { long paymentTime = timeLimits.paymentTime(); long queueTaskTime = timeLimits.queueTaskTime(); long messageTime = timeLimits.messageTime(); long employeeTime = timeLimits.employeeTime(); long queueTime = timeLimits.queueTime(); for (double d = 0.1; d < 2; d = d + 0.1) { paymentTime *= d; queueTaskTime *= d; messageTime *= d; employeeTime *= d; queueTime *= d; Commander c = buildCommanderObjectNoPaymentException1(); var order = new Order(new User("K", "J"), "pen", 1f); for (Order.MessageSent ms : Order.MessageSent.values()) { c.placeOrder(order); assertFalse(StringUtils.isBlank(order.id)); } } }
@Override public Enumeration<URL> getResources(String name) throws IOException { List<URL> resources = new ArrayList<>(); ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name); log.trace("Received request to load resources '{}'", name); for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) { switch (classLoadingSource) { case APPLICATION: if (getParent() != null) { resources.addAll(Collections.list(getParent().getResources(name))); } break; case PLUGIN: resources.addAll(Collections.list(findResources(name))); break; case DEPENDENCIES: resources.addAll(findResourcesFromDependencies(name)); break; } } return Collections.enumeration(resources); }
@Test void parentFirstGetResourcesExistsOnlyInDependency() throws IOException, URISyntaxException { Enumeration<URL> resources = parentFirstPluginClassLoader.getResources("META-INF/dependency-file"); assertNumberOfResourcesAndFirstLineOfFirstElement(1, "dependency", resources); }
@Override public void updateLockStatus(String xid, LockStatus lockStatus) { }
@Test public void testUpdateLockStatus() { LocalDBLocker locker = new LocalDBLocker(); String xid = "xid"; LockStatus lockStatus = LockStatus.Locked; locker.updateLockStatus(xid, lockStatus); }
@Override public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) { return execute(commands, command -> { Assert.notNull(command.getKey(), "Key must not be null!"); Assert.notNull(command.getNewName(), "New name must not be null!"); byte[] keyBuf = toByteArray(command.getKey()); byte[] newKeyBuf = toByteArray(command.getNewName()); if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) { return super.rename(commands); } return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf) .filter(Objects::nonNull) .zipWith( Mono.defer(() -> pTtl(command.getKey()) .filter(Objects::nonNull) .map(ttl -> Math.max(0, ttl)) .switchIfEmpty(Mono.just(0L)) ) ) .flatMap(valueAndTtl -> { return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1()); }) .thenReturn(new BooleanResponse<>(command, true)) .doOnSuccess((ignored) -> del(command.getKey())); }); }
@Test public void testRename_keyNotExist() { Integer originalSlot = getSlotForKey(originalKey); newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot)); if (sameSlot) { // This is a quirk of the implementation - since same-slot renames use the non-cluster version, // the result is a Redis error. This behavior matches other spring-data-redis implementations assertThatThrownBy(() -> connection.keyCommands().rename(originalKey, newKey).block()) .isInstanceOf(RedisSystemException.class); } else { Boolean response = connection.keyCommands().rename(originalKey, newKey).block(); assertThat(response).isTrue(); final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block(); assertThat(newKeyValue).isEqualTo(null); } }
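The cross-slot branch of the focal method above emulates RENAME without a native cross-slot command; a comment sketch of the Redis command sequence it issues (a reading of the focal method, not an API guarantee):
// DUMP oldKey               -> serialized value bytes
// PTTL oldKey               -> remaining ttl, clamped to 0 when the key has no expiry
// RESTORE newKey ttl bytes  -> recreates the value in the new key's slot
// DEL oldKey                -> runs on success, completing the rename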
@Override public Neighbor<double[], E> nearest(double[] q) { NeighborBuilder<double[], E> neighbor = new NeighborBuilder<>(); search(q, root, neighbor); neighbor.key = keys[neighbor.index]; neighbor.value = data[neighbor.index]; return neighbor.toNeighbor(); }
@Test public void testBenchmark() throws Exception { System.out.println("----- Benchmark -----"); int N = 40000; int scale = 100; int numTests = 100_000; double[][] coords = new double[N][3]; for (int i = 0; i < N; i++) { coords[i][0] = MathEx.random() * scale; coords[i][1] = MathEx.random() * scale; coords[i][2] = MathEx.random() * scale; } KDTree<double[]> kdt = new KDTree<>(coords, coords); long start = System.currentTimeMillis(); double[] q = new double[3]; for (int i = 0; i < numTests; i++) { q[0] = MathEx.random() * scale; q[1] = MathEx.random() * scale; q[2] = MathEx.random() * scale; kdt.nearest(q); } double time = (System.currentTimeMillis() - start) / 1000.0; System.out.format("Benchmark: %.2fs%n", time); assertTrue(time < 0.25); }
public static OptExpression bind(Pattern pattern, GroupExpression groupExpression) { Binder binder = new Binder(pattern, groupExpression); return binder.next(); }
@Test public void testBinder2() { OptExpression expr = OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN), new OptExpression(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN)), new OptExpression(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN))); Pattern pattern = Pattern.create(OperatorType.PATTERN_LEAF) .addChildren(Pattern.create(OperatorType.LOGICAL_JOIN)) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF)); Memo memo = new Memo(); OptExpression result = Binder.bind(pattern, memo.init(expr)); assertNull(result); }
public void drainHandler(final Runnable handler) { checkContext(); if (drainHandler != null) { throw new IllegalStateException("drainHandler already set"); } this.drainHandler = Objects.requireNonNull(handler); }
@Test public void shouldNotAllowSettingDrainHandlerMoreThanOnce() throws Exception { CountDownLatch latch = new CountDownLatch(1); context.runOnContext(v -> { getBufferedPublisher().drainHandler(() -> { }); try { getBufferedPublisher().drainHandler(() -> { }); fail("Should throw exception"); } catch (IllegalStateException e) { // OK latch.countDown(); } }); awaitLatch(latch); }
public SqlType getExpressionSqlType(final Expression expression) { return getExpressionSqlType(expression, Collections.emptyMap()); }
@Test public void shouldEvaluateAnyNumberOfArgumentLambda() { // Given: givenUdfWithNameAndReturnType("TRANSFORM", SqlTypes.STRING); when(function.parameters()).thenReturn( ImmutableList.of( ArrayType.of(DoubleType.INSTANCE), StringType.INSTANCE, MapType.of(LongType.INSTANCE, DoubleType.INSTANCE), LambdaType.of( ImmutableList.of( DoubleType.INSTANCE, StringType.INSTANCE, LongType.INSTANCE, DoubleType.INSTANCE ), StringType.INSTANCE))); final Expression expression = new FunctionCall( FunctionName.of("TRANSFORM"), ImmutableList.of( ARRAYCOL, new StringLiteral("Q"), MAPCOL, new LambdaFunctionCall( ImmutableList.of("A", "B", "C", "D"), new ArithmeticBinaryExpression( Operator.ADD, new LambdaVariable("C"), new IntegerLiteral(5)) ))); // When: final SqlType exprType = expressionTypeManager.getExpressionSqlType(expression); // Then: assertThat(exprType, is(SqlTypes.STRING)); verify(udfFactory).getFunction( ImmutableList.of( SqlArgument.of(SqlTypes.array(SqlTypes.DOUBLE)), SqlArgument.of(SqlTypes.STRING), SqlArgument.of(SqlTypes.map(SqlTypes.BIGINT, SqlTypes.DOUBLE)), SqlArgument.of(SqlLambda.of(4)))); verify(function).getReturnType( ImmutableList.of( SqlArgument.of(SqlTypes.array(SqlTypes.DOUBLE)), SqlArgument.of(SqlTypes.STRING), SqlArgument.of(SqlTypes.map(SqlTypes.BIGINT, SqlTypes.DOUBLE)), SqlArgument.of(SqlLambdaResolved .of(ImmutableList.of(SqlTypes.DOUBLE, SqlTypes.STRING, SqlTypes.BIGINT, SqlTypes.DOUBLE), SqlTypes.BIGINT)))); }
@Override public void report(final SortedMap<MetricName, Gauge> gauges, final SortedMap<MetricName, Counter> counters, final SortedMap<MetricName, Histogram> histograms, final SortedMap<MetricName, Meter> meters, final SortedMap<MetricName, Timer> timers) { final long now = System.currentTimeMillis(); if(logger.isDebugEnabled()) logger.debug("InfluxDbReporter report is called with counter size " + counters.size()); try { influxDb.flush(); for (Map.Entry<MetricName, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Meter> entry : meters.entrySet()) { reportMeter(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), now); } if (influxDb.hasSeriesData()) { influxDb.writeData(); } // reset counters for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) { Counter counter = entry.getValue(); long count = counter.getCount(); counter.dec(count); } } catch (Exception e) { logger.error("Unable to report to InfluxDB. Discarding data.", e); } }
@Test public void reportsByteGaugeValues() throws Exception { reporter .report(map("gauge", gauge((byte) 1)), this.map(), this.map(), this.map(), this.map()); final ArgumentCaptor<InfluxDbPoint> influxDbPointCaptor = ArgumentCaptor.forClass(InfluxDbPoint.class); Mockito.verify(influxDb, atLeastOnce()).appendPoints(influxDbPointCaptor.capture()); InfluxDbPoint point = influxDbPointCaptor.getValue(); /* assertThat(point.getMeasurement()).isEqualTo("gauge"); assertThat(point.getFields()).isNotEmpty(); assertThat(point.getFields()).hasSize(1); assertThat(point.getFields()).contains(entry("value", (byte) 1)); */ }
public static Match match() { return new AutoValue_FileIO_Match.Builder() .setConfiguration(MatchConfiguration.create(EmptyMatchTreatment.DISALLOW)) .build(); }
@Test @Category(NeedsRunner.class) public void testMatchDisallowEmptyExplicit() throws IOException { p.apply( FileIO.match() .filepattern(tmpFolder.getRoot().getAbsolutePath() + "/*") .withEmptyMatchTreatment(EmptyMatchTreatment.DISALLOW)); thrown.expectCause(isA(FileNotFoundException.class)); p.run(); }
static boolean empty(String s) { return (s == null || s.equals("")); }
@Test public void testEmpty() { assertTrue(LogUtil.empty(null)); assertTrue(LogUtil.empty("")); assertFalse(LogUtil.empty("f")); assertFalse(LogUtil.empty("fo")); assertFalse(LogUtil.empty("foo")); }
@Override public ValidationResult validate(Object value) { if (value instanceof org.joda.time.DateTime && value.toString().endsWith("Z")) { return new ValidationResult.ValidationPassed(); } else { return new ValidationResult.ValidationFailed(value + " is not a valid date!"); } }
@Test public void testValidate() throws Exception { Validator v = new DateValidator(); assertFalse(v.validate(null).passed()); assertFalse(v.validate(9001).passed()); assertFalse(v.validate("").passed()); assertFalse(v.validate(new java.util.Date()).passed()); // Only joda datetime. assertTrue(v.validate(new org.joda.time.DateTime(DateTimeZone.UTC)).passed()); // Only accepts UTC. assertFalse(v.validate(new org.joda.time.DateTime(DateTimeZone.forID("+09:00"))).passed()); }
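The "Only accepts UTC" behaviour follows from Joda-Time's default formatting, which the validator relies on via endsWith("Z"); the sample renderings below are illustrative assumptions:
// new DateTime(DateTimeZone.UTC).toString()             -> "2024-01-01T00:00:00.000Z"      (passes)
// new DateTime(DateTimeZone.forID("+09:00")).toString() -> "2024-01-01T09:00:00.000+09:00" (fails)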
@Override public Integer getJavaVersion() { return jarJavaVersion; }
@Test public void testGetJavaVersion() { StandardPackagedProcessor standardPackagedProcessor = new StandardPackagedProcessor(Paths.get("ignore"), 8); assertThat(standardPackagedProcessor.getJavaVersion()).isEqualTo(8); }
public void startAsync() { try { udfLoader.load(); ProcessingLogServerUtils.maybeCreateProcessingLogTopic( serviceContext.getTopicClient(), processingLogConfig, ksqlConfig); if (processingLogConfig.getBoolean(ProcessingLogConfig.STREAM_AUTO_CREATE)) { log.warn("processing log auto-create is enabled, but this is not supported " + "for headless mode."); } rocksDBConfigSetterHandler.accept(ksqlConfig); processesQueryFile(readQueriesFile(queriesFile)); showWelcomeMessage(); final Properties properties = new Properties(); ksqlConfig.originals().forEach((key, value) -> { if (nonNull(value)) { properties.put(key, value.toString()); } }); versionChecker.start(KsqlModuleType.SERVER, properties); } catch (final Exception e) { log.error("Failed to start KSQL Server with query file: " + queriesFile, e); throw e; } }
@Test public void shouldNotStartValidationPhaseQueries() { // Given: givenFileContainsAPersistentQuery(); when(sandBox.execute(any(), any(ConfiguredStatement.class))) .thenReturn(ExecuteResult.of(sandBoxQuery)); // When: standaloneExecutor.startAsync(); // Then: verify(sandBoxQuery, never()).start(); }
public List<String> getVehicles() { return getEncodedValues().stream() .filter(ev -> ev.getName().endsWith("_access")) .map(ev -> ev.getName().replaceAll("_access", "")) .filter(v -> getEncodedValues().stream().anyMatch(ev -> ev.getName().contains(VehicleSpeed.key(v)))) .collect(Collectors.toList()); }
@Test public void testGetVehicles() { EncodingManager em = EncodingManager.start() .add(VehicleAccess.create("car")) .add(VehicleAccess.create("bike")).add(VehicleSpeed.create("bike", 4, 2, true)) .add(VehicleSpeed.create("roads", 5, 5, false)) .add(VehicleAccess.create("hike")).add(new DecimalEncodedValueImpl("whatever_hike_average_speed_2022", 5, 5, true)) .add(RoadAccess.create()) .build(); // only for bike+hike there is access+'speed' assertEquals(Arrays.asList("bike", "hike"), em.getVehicles()); }
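A trace of why only bike and hike survive getVehicles() (assuming VehicleSpeed.key("hike") resolves to "hike_average_speed"):
// car:   car_access present, but no car speed encoded value     -> dropped by the speed filter
// bike:  bike_access and bike speed both present                -> kept
// roads: speed present but no roads_access                      -> never a candidate
// hike:  "whatever_hike_average_speed_2022".contains("hike_average_speed") -> kept via the contains() match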
public static <T> T newInstanceOrNull(Class<? extends T> clazz, Object... params) { Constructor<T> constructor = selectMatchingConstructor(clazz, params); if (constructor == null) { return null; } try { return constructor.newInstance(params); } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { return null; } }
@Test public void newInstanceOrNull_createInstanceWithSingleArgument() { ClassWithStringConstructor instance = InstantiationUtils.newInstanceOrNull(ClassWithStringConstructor.class, "foo"); assertNotNull(instance); }
public ExecCommandExecutor getCommandExecutor() { return commandExecutor; }
@Test @DirtiesContext public void testCreateEndpointCustomCommandExecutor() throws Exception { ExecEndpoint e = createExecEndpoint("exec:test?commandExecutor=#customExecutor"); assertSame(customExecutor, e.getCommandExecutor(), "Expected is the custom customExecutor reference from the application context"); }
public ArrayList<AnalysisResult<T>> getOutliers(Track<T> track) { // the stream is wonky due to the raw type, probably could be improved return track.points().stream() .map(point -> analyzePoint(point, track)) .filter(analysisResult -> analysisResult.isOutlier()) .collect(toCollection(ArrayList::new)); }
@Test public void testNoOutlier_2() { /* * Confirm that a flat altitude profile with a single 300-foot deviation (i.e. 3 times the * minimum possible altitude change) does not create an outlier. */ Track<NopHit> testTrack2 = createTrackFromResource(VerticalOutlierDetector.class, "NoAltitudeOutlier_2.txt"); Collection<AnalysisResult<NopHit>> outliers = (new VerticalOutlierDetector<NopHit>()).getOutliers(testTrack2); assertTrue( outliers.isEmpty(), "There should be no outliers, a single 300-foot deviation (3x the minimum altitude change) is not enough to be flagged" ); }
public static void isNull(Object object, String message) { if (object != null) { throw new IllegalArgumentException(message); } }
@Test(expected = IllegalArgumentException.class) public void assertIsNullAndMessageIsNull() { Assert.isNull(""); }
public Map<String, Object> getExtras() { return Collections.unmodifiableMap(extras); }
@Test public void testGetExtras() { Request request = new Request(); request.putExtra("a", "1"); assertThat(request.getExtras()).containsEntry("a", "1"); }
public static CharSequence escapeCsv(CharSequence value) { return escapeCsv(value, false); }
@Test public void escapeCsvWithAlreadyEscapedQuote() { CharSequence value = "foo\"\"goo"; CharSequence expected = "foo\"\"goo"; escapeCsv(value, expected); }
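The test above delegates to a same-named local helper instead of asserting inline; a hypothetical shape of that helper (the name and assertion style are assumptions, not Netty's actual code):
private static void escapeCsv(CharSequence value, CharSequence expected) {
    CharSequence escaped = StringUtil.escapeCsv(value);
    assertEquals(expected.toString(), escaped.toString());
}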
public int getASNum() { return asNum; }
@Test public void testConstruction() { LispAsAddress asAddress = address1; assertThat(asAddress.getASNum(), is(1)); }
public static String serializeRecordToJsonExpandingValue(ObjectMapper mapper, Record<GenericObject> record, boolean flatten) throws JsonProcessingException { JsonRecord jsonRecord = new JsonRecord(); GenericObject value = record.getValue(); if (value != null) { jsonRecord.setPayload(toJsonSerializable(record.getSchema(), value.getNativeObject())); } record.getKey().ifPresent(jsonRecord::setKey); record.getTopicName().ifPresent(jsonRecord::setTopicName); record.getEventTime().ifPresent(jsonRecord::setEventTime); record.getProperties().forEach(jsonRecord::addProperty); if (flatten) { JsonNode jsonNode = mapper.convertValue(jsonRecord, JsonNode.class); return JsonFlattener.flatten(new JacksonJsonValue(jsonNode)); } else { return mapper.writeValueAsString(jsonRecord); } }
@Test(dataProvider = "schemaType") public void testKeyValueSerializeNoValue(SchemaType schemaType) throws Exception { RecordSchemaBuilder keySchemaBuilder = org.apache.pulsar.client.api.schema.SchemaBuilder.record("key"); keySchemaBuilder.field("a").type(SchemaType.STRING).optional().defaultValue(null); GenericSchema<GenericRecord> keySchema = Schema.generic(keySchemaBuilder.build(schemaType)); RecordSchemaBuilder valueSchemaBuilder = org.apache.pulsar.client.api.schema.SchemaBuilder.record("value"); valueSchemaBuilder.field("c").type(SchemaType.STRING).optional().defaultValue(null); GenericSchema<GenericRecord> valueSchema = Schema.generic(valueSchemaBuilder.build(schemaType)); Schema<org.apache.pulsar.common.schema.KeyValue<GenericRecord, GenericRecord>> keyValueSchema = Schema.KeyValue(keySchema, valueSchema, KeyValueEncodingType.INLINE); org.apache.pulsar.common.schema.KeyValue<GenericRecord, GenericRecord> keyValue = new org.apache.pulsar.common.schema.KeyValue<>(null, null); GenericObject genericObject = new GenericObject() { @Override public SchemaType getSchemaType() { return SchemaType.KEY_VALUE; } @Override public Object getNativeObject() { return keyValue; } }; Record<GenericObject> genericObjectRecord = new Record<>() { @Override public Optional<String> getTopicName() { return Optional.of("data-ks1.table1"); } @Override public org.apache.pulsar.client.api.Schema getSchema() { return keyValueSchema; } @Override public Optional<String> getKey() { return Optional.of("message-key"); } @Override public GenericObject getValue() { return genericObject; } @Override public Map<String, String> getProperties() { return Collections.emptyMap(); } @Override public Optional<Long> getEventTime() { return Optional.of(1648502845803L); } }; ObjectMapper objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); String json = Utils.serializeRecordToJsonExpandingValue(objectMapper, genericObjectRecord, false); assertEquals(json, "{\"topicName\":\"data-ks1.table1\",\"key\":\"message-key\"," + "\"payload\":{}," + "\"eventTime\":1648502845803}"); json = Utils.serializeRecordToJsonExpandingValue(objectMapper, genericObjectRecord, true); assertEquals(json, "{\"topicName\":\"data-ks1.table1\",\"key\":\"message-key\"," + "\"payload\":{}," + "\"eventTime\":1648502845803}"); }
@Nullable @Override public Message decode(@Nonnull final RawMessage rawMessage) { final GELFMessage gelfMessage = new GELFMessage(rawMessage.getPayload(), rawMessage.getRemoteAddress()); final String json = gelfMessage.getJSON(decompressSizeLimit, charset); final JsonNode node; try { node = objectMapper.readTree(json); if (node == null) { throw new IOException("null result"); } } catch (final Exception e) { log.error("Could not parse JSON, first 400 characters: " + StringUtils.abbreviate(json, 403), e); throw new IllegalStateException("JSON is null/could not be parsed (invalid JSON)", e); } try { validateGELFMessage(node, rawMessage.getId(), rawMessage.getRemoteAddress()); } catch (IllegalArgumentException e) { log.trace("Invalid GELF message <{}>", node); throw e; } // Timestamp. final double messageTimestamp = timestampValue(node); final DateTime timestamp; if (messageTimestamp <= 0) { timestamp = rawMessage.getTimestamp(); } else { // we treat this as a unix timestamp timestamp = Tools.dateTimeFromDouble(messageTimestamp); } final Message message = messageFactory.createMessage( stringValue(node, "short_message"), stringValue(node, "host"), timestamp ); message.addField(Message.FIELD_FULL_MESSAGE, stringValue(node, "full_message")); final String file = stringValue(node, "file"); if (file != null && !file.isEmpty()) { message.addField("file", file); } final long line = longValue(node, "line"); if (line > -1) { message.addField("line", line); } // Level is set by server if not specified by client. final int level = intValue(node, "level"); if (level > -1) { message.addField("level", level); } // Facility is set by server if not specified by client. final String facility = stringValue(node, "facility"); if (facility != null && !facility.isEmpty()) { message.addField("facility", facility); } // Add additional data if there is some. final Iterator<Map.Entry<String, JsonNode>> fields = node.fields(); while (fields.hasNext()) { final Map.Entry<String, JsonNode> entry = fields.next(); String key = entry.getKey(); // Do not index useless GELF "version" field. if ("version".equals(key)) { continue; } // Don't include GELF syntax underscore in message field key. if (key.startsWith("_") && key.length() > 1) { key = key.substring(1); } // We already set short_message and host as message and source. Do not add as fields again. if ("short_message".equals(key) || "host".equals(key)) { continue; } // Skip standard or already set fields. if (message.getField(key) != null || Message.RESERVED_FIELDS.contains(key) && !Message.RESERVED_SETTABLE_FIELDS.contains(key)) { continue; } // Convert JSON containers to Strings, and pick a suitable number representation. final JsonNode value = entry.getValue(); final Object fieldValue; if (value.isContainerNode()) { fieldValue = value.toString(); } else if (value.isFloatingPointNumber()) { fieldValue = value.asDouble(); } else if (value.isIntegralNumber()) { fieldValue = value.asLong(); } else if (value.isNull()) { log.debug("Field [{}] is NULL. Skipping.", key); continue; } else if (value.isTextual()) { fieldValue = value.asText(); } else { log.debug("Field [{}] has unknown value type. Skipping.", key); continue; } message.addField(key, fieldValue); } return message; }
@Test public void decodeSucceedsWithMinimalMessages() throws Exception { assertThat(codec.decode(new RawMessage("{\"short_message\":\"0\"}".getBytes(StandardCharsets.UTF_8)))).isNotNull(); assertThat(codec.decode(new RawMessage("{\"message\":\"0\"}".getBytes(StandardCharsets.UTF_8)))).isNotNull(); }
public String getBuilderName(String propertyName, JsonNode node) { propertyName = getPropertyNameForAccessor(propertyName, node); String prefix = "with"; if (propertyName.length() > 1 && Character.isUpperCase(propertyName.charAt(1))) { return prefix + propertyName; } else { return prefix + capitalize(propertyName); } }
@Test public void testBuilderNamedCorrectly() { assertThat(nameHelper.getBuilderName("foo", NODE), is("withFoo")); assertThat(nameHelper.getBuilderName("oAuth2State", NODE), is("withoAuth2State")); assertThat(nameHelper.getBuilderName("URL", NODE), is("withUrl")); }
public void pop() { Preconditions.checkState(!stack.isEmpty(), "Error in file properties stack pop, popping at 0"); stack.remove(stack.size() - 1); updateProperties(); }
@Test public void testPop_nothingToPop() { FilePropertiesStack testStack = new FilePropertiesStack(); try { testStack.pop(); Assert.fail(); } catch (IllegalStateException ise) { Assert.assertEquals("Error in file properties stack pop, popping at 0", ise.getMessage()); } }
public void launch(Monitored mp) { if (!lifecycle.tryToMoveTo(Lifecycle.State.STARTING)) { throw new IllegalStateException("Already started"); } monitored = mp; Logger logger = LoggerFactory.getLogger(getClass()); try { launch(logger); } catch (Exception e) { logger.warn("Fail to start {}", processId.getHumanReadableName(), e); hardStop(); } }
@Test public void fail_to_launch_multiple_times() throws IOException { Props props = createProps(); ProcessEntryPoint entryPoint = new ProcessEntryPoint(props, exit, commands, runtime); entryPoint.launch(new NoopProcess()); try { entryPoint.launch(new NoopProcess()); fail(); } catch (IllegalStateException e) { assertThat(e).hasMessage("Already started"); } }
@Override public int getMaxColumnNameLength() { return 0; }
@Test void assertGetMaxColumnNameLength() { assertThat(metaData.getMaxColumnNameLength(), is(0)); }
@Override public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException { JMeterVariables vars = getVariables(); String stringToSplit = ((CompoundVariable) values[0]).execute(); String varNamePrefix = ((CompoundVariable) values[1]).execute().trim(); String splitString = ","; if (values.length > 2) { // Split string provided String newSplitString = ((CompoundVariable) values[2]).execute(); splitString = newSplitString.length() > 0 ? newSplitString : splitString; } log.debug("Split {} using {} into {}", stringToSplit, splitString, varNamePrefix); String[] parts = JOrphanUtils.split(stringToSplit, splitString, "?");// $NON-NLS-1$ vars.put(varNamePrefix, stringToSplit); vars.put(varNamePrefix + "_n", Integer.toString(parts.length));// $NON-NLS-1$ for (int i = 1; i <= parts.length; i++) { if (log.isDebugEnabled()){ log.debug(parts[i-1]); } vars.put(varNamePrefix + "_" + i, parts[i - 1]);// $NON-NLS-1$ } vars.remove(varNamePrefix + "_" + (parts.length+1)); return stringToSplit; }
@Test public void shouldSplitWithPreviousResultOnly() throws Exception { String src = "a,,c,"; vars.put("VAR", src); SplitFunction split = splitParams("${VAR}", "VAR5", null); SampleResult previousResult = new SampleResult(); previousResult.setResponseData("Some data", null); assertEquals(src, split.execute(previousResult, null)); assertEquals("4", vars.get("VAR5_n")); assertEquals("a", vars.get("VAR5_1")); assertEquals("?", vars.get("VAR5_2")); assertEquals("c", vars.get("VAR5_3")); assertEquals("?", vars.get("VAR5_4")); assertNull(vars.get("VAR5_5")); }
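splitParams is a fixture helper not shown in this pair; a plausible reconstruction using JMeter's CompoundVariable parameter wiring (an assumption about the suite's setup):
private SplitFunction splitParams(String p1, String p2, String p3) throws Exception {
    SplitFunction split = new SplitFunction();
    Collection<CompoundVariable> parms = new ArrayList<>();
    parms.add(new CompoundVariable(p1));
    parms.add(new CompoundVariable(p2));
    if (p3 != null) {
        parms.add(new CompoundVariable(p3));
    }
    split.setParameters(parms);
    return split;
}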
@Override public String toString(@Nullable String root, Iterable<String> names) { StringBuilder builder = new StringBuilder(); if (root != null) { builder.append(root); } joiner().appendTo(builder, names); return builder.toString(); }
@Test public void testUnix_toUri_escaping() { URI uri = PathType.unix().toUri(fileSystemUri, "/", ImmutableList.of("foo bar"), false); assertThat(uri.toString()).isEqualTo("jimfs://foo/foo%20bar"); assertThat(uri.getRawPath()).isEqualTo("/foo%20bar"); assertThat(uri.getPath()).isEqualTo("/foo bar"); }
@Override public CompletableFuture<YarnWorkerNode> requestResource( TaskExecutorProcessSpec taskExecutorProcessSpec) { checkInitialized(); final CompletableFuture<YarnWorkerNode> requestResourceFuture = new CompletableFuture<>(); final Optional<TaskExecutorProcessSpecContainerResourcePriorityAdapter.PriorityAndResource> priorityAndResourceOpt = taskExecutorProcessSpecContainerResourcePriorityAdapter .getPriorityAndResource(taskExecutorProcessSpec); if (!priorityAndResourceOpt.isPresent()) { requestResourceFuture.completeExceptionally( new ResourceManagerException( String.format( "Could not compute the container Resource from the given TaskExecutorProcessSpec %s. " + "This usually indicates the requested resource is larger than Yarn's max container resource limit.", taskExecutorProcessSpec))); } else { final Priority priority = priorityAndResourceOpt.get().getPriority(); final Resource resource = priorityAndResourceOpt.get().getResource(); FutureUtils.assertNoException( requestResourceFuture.handle( (ignore, t) -> { if (t == null) { return null; } if (t instanceof CancellationException) { final Queue<CompletableFuture<YarnWorkerNode>> pendingRequestResourceFutures = requestResourceFutures.getOrDefault( taskExecutorProcessSpec, new LinkedList<>()); Preconditions.checkState( pendingRequestResourceFutures.remove( requestResourceFuture)); log.info( "cancelling pending request with priority {}, remaining {} pending container requests.", priority, pendingRequestResourceFutures.size()); int pendingRequestsSizeBeforeCancel = pendingRequestResourceFutures.size() + 1; final Iterator<AMRMClient.ContainerRequest> pendingContainerRequestIterator = getPendingRequestsAndCheckConsistency( priority, resource, pendingRequestsSizeBeforeCancel) .iterator(); Preconditions.checkState( pendingContainerRequestIterator.hasNext()); final AMRMClient.ContainerRequest pendingRequest = pendingContainerRequestIterator.next(); removeContainerRequest(pendingRequest); if (pendingRequestResourceFutures.isEmpty()) { requestResourceFutures.remove(taskExecutorProcessSpec); } if (getNumRequestedNotAllocatedWorkers() <= 0) { resourceManagerClient.setHeartbeatInterval( yarnHeartbeatIntervalMillis); } } else { log.error("Error completing resource request.", t); ExceptionUtils.rethrow(t); } return null; })); addContainerRequest(resource, priority); // make sure we transmit the request fast and receive fast news of granted allocations resourceManagerClient.setHeartbeatInterval(containerRequestHeartbeatIntervalMillis); requestResourceFutures .computeIfAbsent(taskExecutorProcessSpec, ignore -> new LinkedList<>()) .add(requestResourceFuture); log.info( "Requesting new TaskExecutor container with resource {}, priority {}.", taskExecutorProcessSpec, priority); } return requestResourceFuture; }
@Test void testRunAsyncCausesFatalError() throws Exception { new Context() { { final String exceptionMessage = "runAsyncCausesFatalError"; addContainerRequestFutures.add(CompletableFuture.completedFuture(null)); testingYarnAMRMClientAsyncBuilder.setGetMatchingRequestsFunction( ignored -> { throw new RuntimeException(exceptionMessage); }); final CompletableFuture<Throwable> throwableCompletableFuture = new CompletableFuture<>(); resourceEventHandlerBuilder.setOnErrorConsumer( throwableCompletableFuture::complete); runTest( () -> { runInMainThread( () -> getDriver() .requestResource( testingTaskExecutorProcessSpec)); resourceManagerClientCallbackHandler.onContainersAllocated( ImmutableList.of(testingContainer)); Throwable t = throwableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS); final Optional<RuntimeException> optionalCause = ExceptionUtils.findThrowable(t, RuntimeException.class); assertThat(optionalCause).isPresent(); assertThat(optionalCause.get()).hasMessage(exceptionMessage); }); } }; }
@Override public Optional<ShardingConditionValue> generate(final InExpression predicate, final Column column, final List<Object> params, final TimestampServiceRule timestampServiceRule) { if (predicate.isNot()) { return Optional.empty(); } Collection<ExpressionSegment> expressionSegments = predicate.getExpressionList(); List<Integer> parameterMarkerIndexes = new ArrayList<>(expressionSegments.size()); List<Comparable<?>> shardingConditionValues = new LinkedList<>(); for (ExpressionSegment each : expressionSegments) { ConditionValue conditionValue = new ConditionValue(each, params); Optional<Comparable<?>> value = conditionValue.getValue(); if (conditionValue.isNull()) { shardingConditionValues.add(null); conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add); continue; } if (value.isPresent()) { shardingConditionValues.add(value.get()); conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add); continue; } if (ExpressionConditionUtils.isNowExpression(each)) { shardingConditionValues.add(timestampServiceRule.getTimestamp()); } } return shardingConditionValues.isEmpty() ? Optional.empty() : Optional.of(new ListShardingConditionValue<>(column.getName(), column.getTableName(), shardingConditionValues, parameterMarkerIndexes)); }
@Test void assertNotInExpression() { ColumnSegment left = new ColumnSegment(0, 0, new IdentifierValue("id")); ListExpression right = new ListExpression(0, 0); right.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 0)); InExpression inExpression = new InExpression(0, 0, left, right, true); Optional<ShardingConditionValue> shardingConditionValue = generator.generate(inExpression, column, Collections.singletonList(1), timestampServiceRule); assertFalse(shardingConditionValue.isPresent()); }
public FEELFnResult<TemporalAccessor> invoke(@ParameterName("from") String val) { if ( val == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null")); } try { TemporalAccessor parsed = FEEL_TIME.parse(val); if (parsed.query(TemporalQueries.offset()) != null) { // it is an offset-zoned time, so I can know for certain an OffsetTime OffsetTime asOffSetTime = parsed.query(OffsetTime::from); return FEELFnResult.ofResult(asOffSetTime); } else if (parsed.query(TemporalQueries.zone()) == null) { // if it does not contain any zone information at all, then I know for certain is a local time. LocalTime asLocalTime = parsed.query(LocalTime::from); return FEELFnResult.ofResult(asLocalTime); } else if (parsed.query(TemporalQueries.zone()) != null) { boolean hasSeconds = timeStringWithSeconds(val); LocalTime asLocalTime = parsed.query(LocalTime::from); ZoneId zoneId = parsed.query(TemporalQueries.zone()); ZoneTime zoneTime = ZoneTime.of(asLocalTime, zoneId, hasSeconds); return FEELFnResult.ofResult(zoneTime); } return FEELFnResult.ofResult(parsed); } catch (DateTimeException e) { return manageDateTimeException(e, val); } }
@Test void invokeTimeUnitsParamsNull() { FunctionTestUtil.assertResultError(timeFunction.invoke(null, null, null, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(null, null, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(null, 1, null, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(null, 1, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, null, null, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, null, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, 1, null, null), InvalidParametersEvent.class); }
@SuppressWarnings("unchecked") @Override public boolean canHandleReturnType(Class returnType) { return rxSupportedTypes.stream() .anyMatch(classType -> classType.isAssignableFrom(returnType)); }
@Test public void testCheckTypes() { assertThat(rxJava2RetryAspectExt.canHandleReturnType(Flowable.class)).isTrue(); assertThat(rxJava2RetryAspectExt.canHandleReturnType(Single.class)).isTrue(); }
@Override public <T> UncommittedBundle<T> createRootBundle() { return underlying.createRootBundle(); }
@Test public void rootBundleSucceeds() { UncommittedBundle<byte[]> root = factory.createRootBundle(); byte[] array = new byte[] {0, 1, 2}; root.add(WindowedValue.valueInGlobalWindow(array)); CommittedBundle<byte[]> committed = root.commit(Instant.now()); assertThat( committed.getElements(), containsInAnyOrder(WindowedValue.valueInGlobalWindow(array))); }
private KvSwap() {}
@Test @Category(ValidatesRunner.class) public void testKvSwap() { PCollection<KV<String, Integer>> input = p.apply( Create.of(Arrays.asList(TABLE)) .withCoder( KvCoder.of( StringUtf8Coder.of(), NullableCoder.of(BigEndianIntegerCoder.of())))); PCollection<KV<Integer, String>> output = input.apply(KvSwap.create()); PAssert.that(output) .containsInAnyOrder( KV.of(1, "one"), KV.of(2, "two"), KV.of(3, "three"), KV.of(4, "four"), KV.of(4, "dup"), KV.of(5, "dup"), KV.of((Integer) null, "null")); p.run(); }
@Override public Iterator<V> descendingIterator() { return new Iterator<V>() { private int currentIndex = size(); private boolean removeExecuted; @Override public boolean hasNext() { int size = size(); return currentIndex > 0 && size > 0; } @Override public V next() { if (!hasNext()) { throw new NoSuchElementException("No such element at index " + currentIndex); } currentIndex--; removeExecuted = false; return RedissonDeque.this.get(currentIndex); } @Override public void remove() { if (removeExecuted) { throw new IllegalStateException("Element been already deleted"); } RedissonDeque.this.remove(currentIndex); currentIndex++; removeExecuted = true; } }; }
@Test public void testDescendingIteratorOrigin() { final Deque<Integer> queue = new ArrayDeque<Integer>(); queue.addAll(Arrays.asList(1, 2, 3)); assertThat(queue.descendingIterator()).toIterable().containsExactly(3, 2, 1); }
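Note the test exercises java.util.ArrayDeque as a behavioural baseline rather than the focal RedissonDeque; a companion check against Redisson itself might look like this (assuming the suite's usual redisson client fixture):
RDeque<Integer> deque = redisson.getDeque("deque");
deque.addAll(Arrays.asList(1, 2, 3));
assertThat(deque.descendingIterator()).toIterable().containsExactly(3, 2, 1);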
@Override public Enumeration<URL> getResources(String name) throws IOException { List<URL> resources = new ArrayList<>(); ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name); log.trace("Received request to load resources '{}'", name); for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) { switch (classLoadingSource) { case APPLICATION: if (getParent() != null) { resources.addAll(Collections.list(getParent().getResources(name))); } break; case PLUGIN: resources.addAll(Collections.list(findResources(name))); break; case DEPENDENCIES: resources.addAll(findResourcesFromDependencies(name)); break; } } return Collections.enumeration(resources); }
@Test void parentLastGetResourcesExistsOnlyInPlugin() throws IOException, URISyntaxException { Enumeration<URL> resources = parentLastPluginClassLoader.getResources("META-INF/plugin-file"); assertNumberOfResourcesAndFirstLineOfFirstElement(1, "plugin", resources); }
public List<Chapter> getChapters() { return chapters; }
@Test public void testRealFileMp3chapsPy() throws IOException, ID3ReaderException { CountingInputStream inputStream = new CountingInputStream(getClass().getClassLoader() .getResource("mp3chaps-py.mp3").openStream()); ChapterReader reader = new ChapterReader(inputStream); reader.readInputStream(); List<Chapter> chapters = reader.getChapters(); assertEquals(4, chapters.size()); assertEquals(0, chapters.get(0).getStart()); assertEquals(7000, chapters.get(1).getStart()); assertEquals(9000, chapters.get(2).getStart()); assertEquals(11000, chapters.get(3).getStart()); assertEquals("Start", chapters.get(0).getTitle()); assertEquals("Chapter 1", chapters.get(1).getTitle()); assertEquals("Chapter 2", chapters.get(2).getTitle()); assertEquals("Chapter 3", chapters.get(3).getTitle()); }
@Override public boolean isFinished() { return finishing && outputPage == null; }
@Test(dataProvider = "hashEnabledValues") public void testSemiJoin(boolean hashEnabled) { DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext(); // build OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName()); RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT); Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder .row(10L) .row(30L) .row(30L) .row(35L) .row(36L) .row(37L) .row(50L) .build()); SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory( 1, new PlanNodeId("test"), rowPagesBuilder.getTypes().get(0), 0, rowPagesBuilder.getHashChannel(), 10, new JoinCompiler(createTestMetadataManager())); Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext); Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator); while (!driver.isFinished()) { driver.process(); } // probe List<Type> probeTypes = ImmutableList.of(BIGINT, BIGINT); RowPagesBuilder rowPagesBuilderProbe = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT, BIGINT); List<Page> probeInput = rowPagesBuilderProbe .addSequencePage(10, 30, 0) .build(); Optional<Integer> probeHashChannel = hashEnabled ? Optional.of(probeTypes.size()) : Optional.empty(); HashSemiJoinOperatorFactory joinOperatorFactory = new HashSemiJoinOperatorFactory( 2, new PlanNodeId("test"), setBuilderOperatorFactory.getSetProvider(), rowPagesBuilderProbe.getTypes(), 0, probeHashChannel); // expected MaterializedResult expected = resultBuilder(driverContext.getSession(), concat(probeTypes, ImmutableList.of(BOOLEAN))) .row(30L, 0L, true) .row(31L, 1L, false) .row(32L, 2L, false) .row(33L, 3L, false) .row(34L, 4L, false) .row(35L, 5L, true) .row(36L, 6L, true) .row(37L, 7L, true) .row(38L, 8L, false) .row(39L, 9L, false) .build(); OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size())); }
@Bean public FilterRegistrationBean<HttpRequestContextFilter> requestContextFilterRegistration( HttpRequestContextFilter requestContextFilter) { FilterRegistrationBean<HttpRequestContextFilter> registration = new FilterRegistrationBean<>(); registration.setFilter(requestContextFilter); registration.addUrlPatterns("/*"); registration.setName("nacosRequestContextFilter"); registration.setOrder(Integer.MIN_VALUE); return registration; }
@Test void testRequestContextFilterRegistration() { HttpRequestContextConfig contextConfig = new HttpRequestContextConfig(); HttpRequestContextFilter filter = contextConfig.nacosRequestContextFilter(); FilterRegistrationBean<HttpRequestContextFilter> actual = contextConfig.requestContextFilterRegistration( filter); assertEquals(filter, actual.getFilter()); assertEquals("/*", actual.getUrlPatterns().iterator().next()); assertEquals(Integer.MIN_VALUE, actual.getOrder()); }
public ProtocolBuilder prompt(String prompt) { this.prompt = prompt; return getThis(); }
@Test void prompt() { ProtocolBuilder builder = new ProtocolBuilder(); builder.prompt("prompt"); Assertions.assertEquals("prompt", builder.build().getPrompt()); }
public static Object typeConvert(String tableName ,String columnName, String value, int sqlType, String mysqlType) { if (value == null || (value.equals("") && !(isText(mysqlType) || sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.LONGVARCHAR))) { return null; } try { Object res; switch (sqlType) { case Types.INTEGER: res = Integer.parseInt(value); break; case Types.SMALLINT: res = Short.parseShort(value); break; case Types.BIT: case Types.TINYINT: res = Byte.parseByte(value); break; case Types.BIGINT: if (mysqlType.startsWith("bigint") && mysqlType.endsWith("unsigned")) { res = new BigInteger(value); } else { res = Long.parseLong(value); } break; // case Types.BIT: case Types.BOOLEAN: res = !"0".equals(value); break; case Types.DOUBLE: case Types.FLOAT: res = Double.parseDouble(value); break; case Types.REAL: res = Float.parseFloat(value); break; case Types.DECIMAL: case Types.NUMERIC: res = new BigDecimal(value); break; case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: case Types.BLOB: res = value.getBytes("ISO-8859-1"); break; case Types.DATE: if (!value.startsWith("0000-00-00")) { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Date(date.getTime()); } else { res = null; } } else { res = null; } break; case Types.TIME: { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Time(date.getTime()); } else { res = null; } break; } case Types.TIMESTAMP: if (!value.startsWith("0000-00-00")) { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Timestamp(date.getTime()); } else { res = null; } } else { res = null; } break; case Types.CLOB: default: res = value; break; } return res; } catch (Exception e) { logger.error("table: {} column: {}, failed convert type {} to {}", tableName, columnName, value, sqlType); return value; } }
@Test public void typeConvertInputNotNullNotNullNotNullPositiveNotNullOutputTrue() { // Arrange final String tableName = "testTable"; final String columnName = "testColumn"; final String value = "5"; final int sqlType = 16; final String mysqlType = "bigintsunsigned"; // Act final Object actual = JdbcTypeUtil.typeConvert(tableName, columnName, value, sqlType, mysqlType); // Assert result Assert.assertTrue((boolean)actual); }
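A trace of why this auto-generated case returns true: sqlType 16 is java.sql.Types.BOOLEAN, so the switch takes the BOOLEAN branch and the odd "bigintsunsigned" mysqlType is never consulted on that path.
// Types.BOOLEAN == 16  ->  res = !"0".equals("5")  ->  true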
public void run() { try { InputStreamReader isr = new InputStreamReader( this.is ); BufferedReader br = new BufferedReader( isr ); String line = null; while ( ( line = br.readLine() ) != null ) { String logEntry = this.type + " " + line; switch ( this.logLevel ) { case MINIMAL: log.logMinimal( logEntry ); break; case BASIC: log.logBasic( logEntry ); break; case DETAILED: log.logDetailed( logEntry ); break; case DEBUG: log.logDebug( logEntry ); break; case ROWLEVEL: log.logRowlevel( logEntry ); break; case ERROR: log.logError( logEntry ); break; default: // NONE break; } } } catch ( IOException ioe ) { if ( log.isError() ) { log.logError( this.type + " " + Const.getStackTracker( ioe ) ); } } }
@Test public void testLogError() { streamLogger = new ConfigurableStreamLogger( log, is, LogLevel.ERROR, PREFIX ); streamLogger.run(); Mockito.verify( log ).logError( OUT1 ); Mockito.verify( log ).logError( OUT2 ); }
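The fixture fields (log, is, PREFIX, OUT1, OUT2) are defined outside this pair; a hypothetical setup consistent with the verified calls (all names and values are assumptions):
private static final String PREFIX = "OUTPUT";
private static final String MSG1 = "line1";
private static final String MSG2 = "line2";
private static final String OUT1 = PREFIX + " " + MSG1;
private static final String OUT2 = PREFIX + " " + MSG2;
private LogChannelInterface log;
private InputStream is;

@Before
public void setUp() {
    log = Mockito.mock(LogChannelInterface.class);
    is = new ByteArrayInputStream((MSG1 + "\n" + MSG2).getBytes(StandardCharsets.UTF_8));
}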
public static ClusterHealthStatus isHealth(List<RemoteInstance> remoteInstances) { if (CollectionUtils.isEmpty(remoteInstances)) { return ClusterHealthStatus.unHealth("can't get the instance list"); } if (!CoreModuleConfig.Role.Receiver.equals(ROLE)) { List<RemoteInstance> selfInstances = remoteInstances.stream(). filter(remoteInstance -> remoteInstance.getAddress().isSelf()).collect(Collectors.toList()); if (CollectionUtils.isEmpty(selfInstances)) { return ClusterHealthStatus.unHealth("can't get itself"); } } if (remoteInstances.size() > 1 && hasIllegalNodeAddress(remoteInstances)) { return ClusterHealthStatus.unHealth("find illegal node in cluster mode such as 127.0.0.1, localhost"); } return ClusterHealthStatus.HEALTH; }
@Test public void healthWhenReceiverRoleWithEmptySelfInstance() { List<RemoteInstance> remoteInstances = new ArrayList<>(); remoteInstances.add(new RemoteInstance(new Address("192.168.0.1", 8892, false))); OAPNodeChecker.setROLE(CoreModuleConfig.Role.Receiver); ClusterHealthStatus clusterHealthStatus = OAPNodeChecker.isHealth(remoteInstances); Assertions.assertTrue(clusterHealthStatus.isHealth()); }
public Filter parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) { if (!filterExpression.contains(FIELD_AND_VALUE_SEPARATOR)) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final String[] split = filterExpression.split(FIELD_AND_VALUE_SEPARATOR, 2); final String fieldPart = split[0]; if (fieldPart == null || fieldPart.isEmpty()) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final String valuePart = split[1]; if (valuePart == null || valuePart.isEmpty()) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final EntityAttribute attributeMetaData = getAttributeMetaData(attributes, fieldPart); final SearchQueryField.Type fieldType = attributeMetaData.type(); if (isRangeValueExpression(valuePart, fieldType)) { if (valuePart.startsWith(RANGE_VALUES_SEPARATOR)) { return new RangeFilter(attributeMetaData.id(), null, extractValue(fieldType, valuePart.substring(RANGE_VALUES_SEPARATOR.length())) ); } else if (valuePart.endsWith(RANGE_VALUES_SEPARATOR)) { return new RangeFilter(attributeMetaData.id(), extractValue(fieldType, valuePart.substring(0, valuePart.length() - RANGE_VALUES_SEPARATOR.length())), null ); } else { final String[] ranges = valuePart.split(RANGE_VALUES_SEPARATOR); return new RangeFilter(attributeMetaData.id(), extractValue(fieldType, ranges[0]), extractValue(fieldType, ranges[1]) ); } } else { return new SingleValueFilter(attributeMetaData.id(), extractValue(fieldType, valuePart)); } }
@Test void parsesFilterExpressionForStringFieldsCorrectlyEvenIfValueContainsRangeSeparator() { final List<EntityAttribute> entityAttributes = List.of(EntityAttribute.builder() .id("text") .title("Text") .type(SearchQueryField.Type.STRING) .filterable(true) .build()); assertEquals( new SingleValueFilter("text", "42" + RANGE_VALUES_SEPARATOR + "53"), toTest.parseSingleExpression("text:42" + RANGE_VALUES_SEPARATOR + "53", entityAttributes )); }
@Override public String requestMessageForIsRepositoryConfigurationValid(RepositoryConfiguration repositoryConfiguration) { Map configuredValues = new LinkedHashMap(); configuredValues.put("repository-configuration", jsonResultMessageHandler.configurationToMap(repositoryConfiguration)); return GSON.toJson(configuredValues); }
@Test public void shouldBuildRequestBodyForCheckRepositoryConfigurationValidRequest() throws Exception { String requestMessage = messageHandler.requestMessageForIsRepositoryConfigurationValid(repositoryConfiguration); assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}")); }
@Override public Optional<RegistryAuthenticator> handleHttpResponseException( ResponseException responseException) throws ResponseException, RegistryErrorException { // Only valid for status code of '401 Unauthorized'. if (responseException.getStatusCode() != HttpStatusCodes.STATUS_CODE_UNAUTHORIZED) { throw responseException; } // Checks if the 'WWW-Authenticate' header is present. String authenticationMethod = responseException.getHeaders().getAuthenticate(); if (authenticationMethod == null) { throw new RegistryErrorExceptionBuilder(getActionDescription(), responseException) .addReason("'WWW-Authenticate' header not found") .build(); } // Parses the header to retrieve the components. try { return RegistryAuthenticator.fromAuthenticationMethod( authenticationMethod, registryEndpointRequestProperties, userAgent, httpClient); } catch (RegistryAuthenticationFailedException ex) { throw new RegistryErrorExceptionBuilder(getActionDescription(), ex) .addReason("Failed get authentication method from 'WWW-Authenticate' header") .build(); } }
@Test public void testHandleHttpResponseException_noHeader() throws ResponseException { Mockito.when(mockResponseException.getStatusCode()) .thenReturn(HttpStatusCodes.STATUS_CODE_UNAUTHORIZED); Mockito.when(mockResponseException.getHeaders()).thenReturn(mockHeaders); Mockito.when(mockHeaders.getAuthenticate()).thenReturn(null); try { testAuthenticationMethodRetriever.handleHttpResponseException(mockResponseException); Assert.fail( "Authentication method retriever should fail if 'WWW-Authenticate' header is not found"); } catch (RegistryErrorException ex) { MatcherAssert.assertThat( ex.getMessage(), CoreMatchers.containsString("'WWW-Authenticate' header not found")); } }
public NewIssuesNotification newNewIssuesNotification(Map<String, UserDto> assigneesByUuid) { verifyAssigneesByUuid(assigneesByUuid); return new NewIssuesNotification(new DetailsSupplierImpl(assigneesByUuid)); }
@Test public void newNewIssuesNotification_DetailsSupplier_getRuleDefinitionByRuleKey_always_returns_empty_if_RuleRepository_is_empty() { NewIssuesNotification underTest = this.underTest.newNewIssuesNotification(emptyMap()); DetailsSupplier detailsSupplier = readDetailsSupplier(underTest); assertThat(detailsSupplier.getRuleDefinitionByRuleKey(RuleKey.of("foo", "bar"))).isEmpty(); assertThat(detailsSupplier.getRuleDefinitionByRuleKey(RuleKey.of("bar", "foo"))).isEmpty(); }
@POST @Consumes(MediaType.TEXT_PLAIN) @Timed @ApiOperation("Add a list of new patterns") @AuditEvent(type = AuditEventTypes.GROK_PATTERN_IMPORT_CREATE) public Response bulkUpdatePatternsFromTextFile(@ApiParam(name = "patterns", required = true) @NotNull InputStream patternsFile, // deprecated. used to drop all existing patterns before import @Deprecated @QueryParam("replace") @DefaultValue("false") boolean deprecatedDropAllExisting, @ApiParam(name = "import-strategy", value = "Strategy to apply when importing.") @QueryParam("import-strategy") ImportStrategy importStrategy) throws ValidationException, IOException { checkPermission(RestPermissions.INPUTS_CREATE); final List<GrokPattern> grokPatterns = readGrokPatterns(patternsFile); if (!grokPatterns.isEmpty()) { try { if (!grokPatternService.validateAll(grokPatterns)) { throw new ValidationException("Invalid pattern contained. Did not save any patterns."); } } catch (GrokException | IllegalArgumentException e) { throw new ValidationException("Invalid pattern. Did not save any patterns\n" + e.getMessage()); } ImportStrategy resolvedStrategy = importStrategy != null ? importStrategy : deprecatedDropAllExisting ? ImportStrategy.DROP_ALL_EXISTING : ImportStrategy.ABORT_ON_CONFLICT; grokPatternService.saveAll(grokPatterns, resolvedStrategy); } return Response.accepted().build(); }
@Test public void bulkUpdatePatternsFromTextFileWithCR() throws Exception { final String patterns = Arrays.stream(GROK_LINES).collect(Collectors.joining("\r")); final ByteArrayInputStream inputStream = new ByteArrayInputStream(patterns.getBytes(StandardCharsets.UTF_8)); final GrokPattern expectedPattern = GrokPattern.create("TEST_PATTERN_0", "Foo"); final Response response = grokResource.bulkUpdatePatternsFromTextFile(inputStream, true, null); assertThat(response.getStatusInfo()).isEqualTo(Response.Status.ACCEPTED); assertThat(response.hasEntity()).isFalse(); await() .atMost(Durations.FIVE_SECONDS) .until(() -> !subscriber.events.isEmpty()); assertThat(subscriber.events) .containsOnly(GrokPatternsUpdatedEvent.create(Collections.singleton(expectedPattern.name()))); }
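GROK_LINES is a fixture constant omitted from this pair; given that only TEST_PATTERN_0 appears in the expected update event, a plausible definition is (the comment line, which would exercise the parser's comment handling, is an assumption):
private static final String[] GROK_LINES = {
        "# A test pattern file",
        "TEST_PATTERN_0 Foo"
};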
@Override public long pageIdx2size(int pageIdx) { return sizeClass.pageIdx2size(pageIdx); }
@Test public void testPageIdx2size() { SizeClasses sc = new SizeClasses(PAGE_SIZE, PAGE_SHIFTS, CHUNK_SIZE, 0); PoolArena<ByteBuffer> arena = new PoolArena.DirectArena(null, sc); for (int i = 0; i < arena.sizeClass.nPSizes; i++) { assertEquals(arena.sizeClass.pageIdx2sizeCompute(i), arena.sizeClass.pageIdx2size(i)); } }
@Override public HttpResponse send(HttpRequest httpRequest) throws IOException { return send(httpRequest, null); }
@Test public void send_whenRequestFailed_throwsException() { assertThrows( IOException.class, () -> httpClient.send(get("http://unknownhost/path").withEmptyHeaders().build())); }
public static boolean isLinkLocalAddress(byte[] targetIp) { checkArgument(targetIp.length == Ip6Address.BYTE_LENGTH); return (targetIp[0] & 0xff) == 0xfe && (targetIp[1] & 0xc0) == 0x80; }
@Test public void testIsLinkLocalAddress() { assertFalse(isLinkLocalAddress(SOURCE_ADDRESS)); assertFalse(isLinkLocalAddress(DESTINATION_ADDRESS)); assertFalse(isLinkLocalAddress(SOLICITATION_NODE_ADDRESS)); assertTrue(isLinkLocalAddress(LINK_LOCAL_ADDRESS_1)); assertTrue(isLinkLocalAddress(LINK_LOCAL_ADDRESS_2)); }
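The IPv6 address constants referenced by this test live elsewhere in the suite. A minimal, hedged sketch of how such fixtures relate to the focal method's bit test — the fixture values below are assumptions for illustration, not the project's constants:

public class LinkLocalSketch {
    public static void main(String[] args) {
        // An IPv6 link-local address falls in fe80::/10: first byte 0xfe,
        // top two bits of the second byte equal to binary 10.
        byte[] linkLocal = new byte[16]; // 16 bytes, matching Ip6Address.BYTE_LENGTH
        linkLocal[0] = (byte) 0xfe;
        linkLocal[1] = (byte) 0x80;
        // (b[1] & 0xc0) == 0x80 accepts second bytes 0x80..0xbf, i.e. fe80:: up to febf::.
        System.out.println((linkLocal[0] & 0xff) == 0xfe && (linkLocal[1] & 0xc0) == 0x80); // true
        byte[] global = new byte[16];
        global[0] = (byte) 0x20; // a 2000::/3-style global unicast address
        System.out.println((global[0] & 0xff) == 0xfe && (global[1] & 0xc0) == 0x80); // false
    }
}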
public static InternalLogger getInstance(Class<?> clazz) { return getInstance(clazz.getName()); }
@Test public void testIsInfoEnabled() { when(mockLogger.isInfoEnabled()).thenReturn(true); InternalLogger logger = InternalLoggerFactory.getInstance("mock"); assertTrue(logger.isInfoEnabled()); verify(mockLogger).isInfoEnabled(); }
public static boolean isSecond(long ts) { return (ts & SECOND_MASK) == 0; }
@Test public void testIsSecond() { Assert.assertFalse(TimeUtils.isSecond(System.currentTimeMillis())); Assert.assertTrue(TimeUtils.isSecond(System.currentTimeMillis() / 1000)); }
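SECOND_MASK is not shown in this pair. A hedged sketch of what the focal method implies: second-resolution epoch timestamps (~1.7e9) fit in 32 bits while millisecond ones (~1.7e12) do not, so a mask over the high 32 bits distinguishes them. The constant's value below is an assumption, not the library's definition:

public class SecondMaskSketch {
    // Assumed mask: selects the high 32 bits of the timestamp.
    private static final long SECOND_MASK = 0xFFFFFFFF00000000L;

    static boolean isSecond(long ts) {
        return (ts & SECOND_MASK) == 0; // zero high bits => plausibly seconds, not millis
    }

    public static void main(String[] args) {
        long millis = System.currentTimeMillis();
        System.out.println(isSecond(millis));        // false: millis overflow 32 bits
        System.out.println(isSecond(millis / 1000)); // true: seconds fit in 32 bits
    }
}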
Map<ProjectionProducer<PTransform<?, ?>>, Map<PCollection<?>, FieldAccessDescriptor>> getPushdownOpportunities() { return pushdownOpportunities.build(); }
@Test public void testProjectionProducerInsideNonProducer_returnsInnerPushdown() { Pipeline p = Pipeline.create(); CompositeTransformWithPushdownInside source = new CompositeTransformWithPushdownInside(); PCollection<Row> output = p.apply(source); Map<PCollection<?>, FieldAccessDescriptor> pCollectionFieldAccess = ImmutableMap.of(output, FieldAccessDescriptor.withFieldNames("field1", "field2")); ProjectionProducerVisitor visitor = new ProjectionProducerVisitor(pCollectionFieldAccess); p.traverseTopologically(visitor); Map<ProjectionProducer<PTransform<?, ?>>, Map<PCollection<?>, FieldAccessDescriptor>> pushdownOpportunities = visitor.getPushdownOpportunities(); Assert.assertEquals(1, pushdownOpportunities.size()); Map<PCollection<?>, FieldAccessDescriptor> opportunitiesForSource = pushdownOpportunities.get(source.innerT); Assert.assertNotNull(opportunitiesForSource); Assert.assertEquals(1, opportunitiesForSource.size()); FieldAccessDescriptor fieldAccessDescriptor = opportunitiesForSource.get(output); Assert.assertNotNull(fieldAccessDescriptor); Assert.assertFalse(fieldAccessDescriptor.getAllFields()); assertThat(fieldAccessDescriptor.fieldNamesAccessed(), containsInAnyOrder("field1", "field2")); }
public static Transcript parse(String str) { if (StringUtils.isBlank(str)) { return null; } str = str.replaceAll("\r\n", "\n"); Transcript transcript = new Transcript(); List<String> lines = Arrays.asList(str.split("\n")); Iterator<String> iter = lines.iterator(); String speaker = ""; String prevSpeaker = ""; StringBuilder body; String line; String segmentBody = ""; long startTimecode = -1L; long spanStartTimecode = -1L; long spanEndTimecode = -1L; long endTimecode = -1L; long duration = 0L; Set<String> speakers = new HashSet<>(); while (iter.hasNext()) { body = new StringBuilder(); line = iter.next(); if (line.isEmpty()) { continue; } spanEndTimecode = endTimecode; if (line.contains("-->")) { String[] timecodes = line.split("-->"); if (timecodes.length < 2) { continue; } startTimecode = parseTimecode(timecodes[0].trim()); endTimecode = parseTimecode(timecodes[1].trim()); if (startTimecode == -1 || endTimecode == -1) { continue; } if (spanStartTimecode == -1) { spanStartTimecode = startTimecode; } duration += endTimecode - startTimecode; do { line = iter.next(); if (StringUtil.isBlank(line)) { break; } body.append(line.strip()); body.append(" "); } while (iter.hasNext()); } if (body.indexOf(": ") != -1) { String[] parts = body.toString().trim().split(":"); if (parts.length < 2) { continue; } prevSpeaker = speaker; speaker = parts[0]; speakers.add(speaker); body = new StringBuilder(parts[1].strip()); if (StringUtils.isNotEmpty(prevSpeaker) && !StringUtils.equals(speaker, prevSpeaker)) { if (StringUtils.isNotEmpty(segmentBody)) { transcript.addSegment(new TranscriptSegment(spanStartTimecode, spanEndTimecode, segmentBody, prevSpeaker)); duration = 0L; spanStartTimecode = startTimecode; segmentBody = body.toString(); continue; } } } else { if (StringUtils.isNotEmpty(prevSpeaker) && StringUtils.isEmpty(speaker)) { speaker = prevSpeaker; } } segmentBody += " " + body; segmentBody = StringUtils.trim(segmentBody); if (duration >= TranscriptParser.MIN_SPAN && endTimecode > spanStartTimecode) { transcript.addSegment(new TranscriptSegment(spanStartTimecode, endTimecode, segmentBody, speaker)); duration = 0L; spanStartTimecode = -1L; segmentBody = ""; } } if (!StringUtil.isBlank(segmentBody) && endTimecode > spanStartTimecode) { segmentBody = StringUtils.trim(segmentBody); transcript.addSegment(new TranscriptSegment(spanStartTimecode, endTimecode, segmentBody, speaker)); } if (transcript.getSegmentCount() > 0) { transcript.setSpeakers(speakers); return transcript; } else { return null; } }
@Test public void testParseSrt() { Transcript result = SrtTranscriptParser.parse(srtStr); assertEquals(result.getSegmentAtTime(0L).getWords(), "Promoting your podcast in a new"); assertEquals(result.getSegmentAtTime(0L).getSpeaker(), "John Doe"); assertEquals(result.getSegmentAtTime(0L).getStartTime(), 0L); assertEquals(result.getSegmentAtTime(0L).getEndTime(), 50730L); assertEquals(result.getSegmentAtTime(90740).getStartTime(), 90740); assertEquals("way. The latest from PogNews. We bring your favorite podcast.", result.getSegmentAtTime(90740).getWords()); }
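The srtStr fixture is defined outside this pair. A minimal input consistent with the first assertions — only the first cue's speaker, text, and timecodes follow directly from the test; the remaining cue boundaries are illustrative guesses:

// Hypothetical fixture; note the parser merges spans shorter than TranscriptParser.MIN_SPAN.
String srtStr =
        "1\n"
        + "00:00:00,000 --> 00:00:50,730\n"
        + "John Doe: Promoting your podcast in a new\n"
        + "\n"
        + "2\n"
        + "00:01:30,740 --> 00:01:36,000\n"
        + "way. The latest from PogNews. We bring your favorite podcast.\n";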
@Override protected void setProperties(Map<String, String> properties) throws DdlException { Preconditions.checkState(properties != null); for (String key : properties.keySet()) { if (!DRIVER_URL.equals(key) && !URI.equals(key) && !USER.equals(key) && !PASSWORD.equals(key) && !TYPE.equals(key) && !NAME.equals(key) && !DRIVER_CLASS.equals(key)) { throw new DdlException("Property " + key + " is unknown"); } } configs = properties; checkProperties(DRIVER_URL); checkProperties(DRIVER_CLASS); checkProperties(URI); checkProperties(USER); checkProperties(PASSWORD); computeDriverChecksum(); }
@Test(expected = DdlException.class) public void testWithoutPassword() throws Exception { Map<String, String> configs = getMockConfigs(); configs.remove(JDBCResource.PASSWORD); JDBCResource resource = new JDBCResource("jdbc_resource_test"); resource.setProperties(configs); }
@Override public HashSlotCursor16byteKey cursor() { return new CursorLongKey2(); }
@Test(expected = AssertionError.class) public void testCursor_valueAddress_whenDisposed() { HashSlotCursor16byteKey cursor = hsa.cursor(); hsa.dispose(); cursor.valueAddress(); }
public static void upgradeConfigurationAndVersion(RuleNode node, RuleNodeClassInfo nodeInfo) { JsonNode oldConfiguration = node.getConfiguration(); int configurationVersion = node.getConfigurationVersion(); int currentVersion = nodeInfo.getCurrentVersion(); var configClass = nodeInfo.getAnnotation().configClazz(); if (oldConfiguration == null || !oldConfiguration.isObject()) { log.warn("Failed to upgrade rule node with id: {} type: {} fromVersion: {} toVersion: {}. " + "Current configuration is null or not a json object. " + "Going to set default configuration ... ", node.getId(), node.getType(), configurationVersion, currentVersion); node.setConfiguration(getDefaultConfig(configClass)); } else { var tbVersionedNode = getTbVersionedNode(nodeInfo); try { JsonNode queueName = oldConfiguration.get(QUEUE_NAME); TbPair<Boolean, JsonNode> upgradeResult = tbVersionedNode.upgrade(configurationVersion, oldConfiguration); if (upgradeResult.getFirst()) { node.setConfiguration(upgradeResult.getSecond()); if (nodeInfo.getAnnotation().hasQueueName() && queueName != null && queueName.isTextual()) { node.setQueueName(queueName.asText()); } } } catch (Exception e) { try { JacksonUtil.treeToValue(oldConfiguration, configClass); } catch (Exception ex) { log.warn("Failed to upgrade rule node with id: {} type: {} fromVersion: {} toVersion: {}. " + "Going to set default configuration ... ", node.getId(), node.getType(), configurationVersion, currentVersion, e); node.setConfiguration(getDefaultConfig(configClass)); } } } node.setConfigurationVersion(currentVersion); }
@Test public void testUpgradeRuleNodeConfigurationWithNewConfigAndOldConfigVersion() throws Exception { // GIVEN var node = new RuleNode(); var nodeInfo = mock(RuleNodeClassInfo.class); var nodeConfigClazz = TbGetEntityDataNodeConfiguration.class; var annotation = mock(org.thingsboard.rule.engine.api.RuleNode.class); var defaultConfig = JacksonUtil.valueToTree(nodeConfigClazz.getDeclaredConstructor().newInstance().defaultConfiguration()); when(nodeInfo.getClazz()).thenReturn((Class) TbGetCustomerAttributeNode.class); when(nodeInfo.getCurrentVersion()).thenReturn(1); when(nodeInfo.getAnnotation()).thenReturn(annotation); when(annotation.configClazz()).thenReturn((Class) nodeConfigClazz); String versionOneDefaultConfig = "{\"fetchTo\":\"METADATA\"," + "\"dataMapping\":{\"alarmThreshold\":\"threshold\"}," + "\"dataToFetch\":\"ATTRIBUTES\"}"; node.setConfiguration(JacksonUtil.toJsonNode(versionOneDefaultConfig)); // WHEN TbNodeUpgradeUtils.upgradeConfigurationAndVersion(node, nodeInfo); // THEN Assertions.assertThat(node.getConfiguration()).isEqualTo(defaultConfig); Assertions.assertThat(node.getConfigurationVersion()).isEqualTo(1); }
@Override public void start() { this.executorService.scheduleAtFixedRate(this::tryBroadcastEvents, getInitialDelay(), getPeriod(), TimeUnit.SECONDS); }
@Test public void nothing_to_broadcast_when_client_list_is_empty() { when(clientsRegistry.getClients()).thenReturn(emptyList()); var underTest = new PushEventPollScheduler(executorService, clientsRegistry, db.getDbClient(), system2, config); underTest.start(); executorService.runCommand(); verify(clientsRegistry, times(0)).broadcastMessage(any(SonarLintPushEvent.class)); }
@Override public void updateDataSourceConfig(DataSourceConfigSaveReqVO updateReqVO) { // validate that the config exists validateDataSourceConfigExists(updateReqVO.getId()); DataSourceConfigDO updateObj = BeanUtils.toBean(updateReqVO, DataSourceConfigDO.class); validateConnectionOK(updateObj); // update dataSourceConfigMapper.updateById(updateObj); }
@Test public void testUpdateDataSourceConfig_success() { try (MockedStatic<JdbcUtils> databaseUtilsMock = mockStatic(JdbcUtils.class)) { // mock data DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class); dataSourceConfigMapper.insert(dbDataSourceConfig);// @Sql: first insert an existing record // prepare parameters DataSourceConfigSaveReqVO reqVO = randomPojo(DataSourceConfigSaveReqVO.class, o -> { o.setId(dbDataSourceConfig.getId()); // set the ID being updated }); // mock the method databaseUtilsMock.when(() -> JdbcUtils.isConnectionOK(eq(reqVO.getUrl()), eq(reqVO.getUsername()), eq(reqVO.getPassword()))).thenReturn(true); // invoke dataSourceConfigService.updateDataSourceConfig(reqVO); // verify the update is correct DataSourceConfigDO dataSourceConfig = dataSourceConfigMapper.selectById(reqVO.getId()); // fetch the latest record assertPojoEquals(reqVO, dataSourceConfig); } }
@SuppressWarnings("fallthrough") public static long[] murmurhash3_x64_128(byte[] key, int offset, int len, int seed) { // The original algorithm does have a 32 bit unsigned seed. // We have to mask to match the behavior of the unsigned types and prevent sign extension. long h1 = seed & 0x00000000FFFFFFFFL; long h2 = seed & 0x00000000FFFFFFFFL; final long c1 = 0x87c37b91114253d5L; final long c2 = 0x4cf5ad432745937fL; int roundedEnd = offset + (len & 0xFFFFFFF0); // round down to 16 byte block for (int i = offset; i < roundedEnd; i += 16) { long k1 = getLongLittleEndian(key, i); long k2 = getLongLittleEndian(key, i + 8); k1 *= c1; k1 = Long.rotateLeft(k1, 31); k1 *= c2; h1 ^= k1; h1 = Long.rotateLeft(h1, 27); h1 += h2; h1 = h1 * 5 + 0x52dce729; k2 *= c2; k2 = Long.rotateLeft(k2, 33); k2 *= c1; h2 ^= k2; h2 = Long.rotateLeft(h2, 31); h2 += h1; h2 = h2 * 5 + 0x38495ab5; } long k1 = 0; long k2 = 0; switch (len & 15) { case 15: k2 = (key[roundedEnd + 14] & 0xffL) << 48; case 14: k2 |= (key[roundedEnd + 13] & 0xffL) << 40; case 13: k2 |= (key[roundedEnd + 12] & 0xffL) << 32; case 12: k2 |= (key[roundedEnd + 11] & 0xffL) << 24; case 11: k2 |= (key[roundedEnd + 10] & 0xffL) << 16; case 10: k2 |= (key[roundedEnd + 9] & 0xffL) << 8; case 9: k2 |= (key[roundedEnd + 8] & 0xffL); k2 *= c2; k2 = Long.rotateLeft(k2, 33); k2 *= c1; h2 ^= k2; case 8: k1 = ((long) key[roundedEnd + 7]) << 56; case 7: k1 |= (key[roundedEnd + 6] & 0xffL) << 48; case 6: k1 |= (key[roundedEnd + 5] & 0xffL) << 40; case 5: k1 |= (key[roundedEnd + 4] & 0xffL) << 32; case 4: k1 |= (key[roundedEnd + 3] & 0xffL) << 24; case 3: k1 |= (key[roundedEnd + 2] & 0xffL) << 16; case 2: k1 |= (key[roundedEnd + 1] & 0xffL) << 8; case 1: k1 |= (key[roundedEnd] & 0xffL); k1 *= c1; k1 = Long.rotateLeft(k1, 31); k1 *= c2; h1 ^= k1; } // ---------- // finalization h1 ^= len; h2 ^= len; h1 += h2; h2 += h1; h1 = fmix64(h1); h2 = fmix64(h2); h1 += h2; h2 += h1; return new long[] {h1, h2}; }
@Test public void testMurmurhash3_x64_128() { Random random = new Random(17); for (int i = 0; i < 128; i++) { byte[] bytes = new byte[i]; random.nextBytes(bytes); long hashCode1 = MurmurHash3.murmurhash3_x64_128(bytes, 0, bytes.length, 47)[0]; long hashCode2 = Hashing.murmur3_128(47).hashBytes(bytes).asLong(); Assert.assertEquals(hashCode1, hashCode2); } }
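The focal method depends on two helpers that this pair does not include. A sketch of both, using the standard MurmurHash3 constants; these bodies are reconstructions consistent with the algorithm, not necessarily the project's exact code:

// Assemble 8 bytes starting at offset into a little-endian long.
static long getLongLittleEndian(byte[] buf, int offset) {
    return ((long) buf[offset + 7] << 56)
            | ((buf[offset + 6] & 0xffL) << 48)
            | ((buf[offset + 5] & 0xffL) << 40)
            | ((buf[offset + 4] & 0xffL) << 32)
            | ((buf[offset + 3] & 0xffL) << 24)
            | ((buf[offset + 2] & 0xffL) << 16)
            | ((buf[offset + 1] & 0xffL) << 8)
            | (buf[offset] & 0xffL);
}

// MurmurHash3's 64-bit finalization mix; forces all state bits to avalanche.
static long fmix64(long k) {
    k ^= k >>> 33;
    k *= 0xff51afd7ed558ccdL;
    k ^= k >>> 33;
    k *= 0xc4ceb9fe1a85ec53L;
    k ^= k >>> 33;
    return k;
}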
@Override public DataflowPipelineJob run(Pipeline pipeline) { // Multi-language pipelines and pipelines that include upgrades should automatically be upgraded // to Runner v2. if (DataflowRunner.isMultiLanguagePipeline(pipeline) || includesTransformUpgrades(pipeline)) { List<String> experiments = firstNonNull(options.getExperiments(), Collections.emptyList()); if (!experiments.contains("use_runner_v2")) { LOG.info( "Automatically enabling Dataflow Runner v2 since the pipeline used cross-language" + " transforms or pipeline needed a transform upgrade."); options.setExperiments( ImmutableList.<String>builder().addAll(experiments).add("use_runner_v2").build()); } } if (useUnifiedWorker(options)) { if (hasExperiment(options, "disable_runner_v2") || hasExperiment(options, "disable_runner_v2_until_2023") || hasExperiment(options, "disable_prime_runner_v2")) { throw new IllegalArgumentException( "Runner V2 both disabled and enabled: at least one of ['beam_fn_api', 'use_unified_worker', 'use_runner_v2', 'use_portable_job_submission'] is set and also one of ['disable_runner_v2', 'disable_runner_v2_until_2023', 'disable_prime_runner_v2'] is set."); } List<String> experiments = new ArrayList<>(options.getExperiments()); // non-null if useUnifiedWorker is true if (!experiments.contains("use_runner_v2")) { experiments.add("use_runner_v2"); } if (!experiments.contains("use_unified_worker")) { experiments.add("use_unified_worker"); } if (!experiments.contains("beam_fn_api")) { experiments.add("beam_fn_api"); } if (!experiments.contains("use_portable_job_submission")) { experiments.add("use_portable_job_submission"); } options.setExperiments(ImmutableList.copyOf(experiments)); } logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline); logWarningIfBigqueryDLQUnused(pipeline); if (shouldActAsStreaming(pipeline)) { options.setStreaming(true); if (useUnifiedWorker(options)) { options.setEnableStreamingEngine(true); List<String> experiments = new ArrayList<>(options.getExperiments()); // non-null if useUnifiedWorker is true if (!experiments.contains("enable_streaming_engine")) { experiments.add("enable_streaming_engine"); } if (!experiments.contains("enable_windmill_service")) { experiments.add("enable_windmill_service"); } } } if (!ExperimentalOptions.hasExperiment(options, "disable_projection_pushdown")) { ProjectionPushdownOptimizer.optimize(pipeline); } LOG.info( "Executing pipeline on the Dataflow Service, which will have billing implications " + "related to Google Compute Engine usage and other Google Cloud Services."); DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class); String workerHarnessContainerImageURL = DataflowRunner.getContainerImageForJob(dataflowOptions); // This incorrectly puns the worker harness container image (which implements v1beta3 API) // with the SDK harness image (which implements Fn API). // // The same Environment is used in different and contradictory ways, depending on whether // it is a v1 or v2 job submission. RunnerApi.Environment defaultEnvironmentForDataflow = Environments.createDockerEnvironment(workerHarnessContainerImageURL); // The SdkComponents for portable and non-portable job submission must be kept distinct. Both // need the default environment. SdkComponents portableComponents = SdkComponents.create(); portableComponents.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); RunnerApi.Pipeline portablePipelineProto = PipelineTranslation.toProto(pipeline, portableComponents, false); // Note that `stageArtifacts` has to be called before `resolveArtifact` because // `resolveArtifact` updates local paths to staged paths in pipeline proto. portablePipelineProto = resolveAnyOfEnvironments(portablePipelineProto); List<DataflowPackage> packages = stageArtifacts(portablePipelineProto); portablePipelineProto = resolveArtifacts(portablePipelineProto); portablePipelineProto = applySdkEnvironmentOverrides(portablePipelineProto, options); if (LOG.isDebugEnabled()) { LOG.debug( "Portable pipeline proto:\n{}", TextFormat.printer().printToString(portablePipelineProto)); } // Stage the portable pipeline proto, retrieving the staged pipeline path, then update // the options on the new job // TODO: add an explicit `pipeline` parameter to the submission instead of pipeline options LOG.info("Staging portable pipeline proto to {}", options.getStagingLocation()); byte[] serializedProtoPipeline = portablePipelineProto.toByteArray(); DataflowPackage stagedPipeline = options.getStager().stageToFile(serializedProtoPipeline, PIPELINE_FILE_NAME); dataflowOptions.setPipelineUrl(stagedPipeline.getLocation()); if (useUnifiedWorker(options)) { LOG.info("Skipping v1 transform replacements since job will run on v2."); } else { // Now rewrite things to be as needed for v1 (mutates the pipeline) // This way the job submitted is valid for v1 and v2, simultaneously replaceV1Transforms(pipeline); } // Capture the SdkComponents for look up during step translations SdkComponents dataflowV1Components = SdkComponents.create(); dataflowV1Components.registerEnvironment( defaultEnvironmentForDataflow .toBuilder() .addAllDependencies(getDefaultArtifacts()) .addAllCapabilities(Environments.getJavaCapabilities()) .build()); // No need to perform transform upgrading for the Runner v1 proto. RunnerApi.Pipeline dataflowV1PipelineProto = PipelineTranslation.toProto(pipeline, dataflowV1Components, true, false); if (LOG.isDebugEnabled()) { LOG.debug( "Dataflow v1 pipeline proto:\n{}", TextFormat.printer().printToString(dataflowV1PipelineProto)); } // Set a unique client_request_id in the CreateJob request. // This is used to ensure idempotence of job creation across retried // attempts to create a job. Specifically, if the service returns a job with // a different client_request_id, it means the returned one is a different // job previously created with the same job name, and that the job creation // has been effectively rejected. The SDK should return // Error::Already_Exists to user in that case. int randomNum = new Random().nextInt(9000) + 1000; String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm") .withZone(DateTimeZone.UTC) .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum; JobSpecification jobSpecification = translator.translate( pipeline, dataflowV1PipelineProto, dataflowV1Components, this, packages); if (!isNullOrEmpty(dataflowOptions.getDataflowWorkerJar()) && !useUnifiedWorker(options)) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); if (!experiments.contains("use_staged_dataflow_worker_jar")) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("use_staged_dataflow_worker_jar") .build()); } } Job newJob = jobSpecification.getJob(); try { newJob .getEnvironment() .setSdkPipelineOptions( MAPPER.readValue(MAPPER_WITH_MODULES.writeValueAsBytes(options), Map.class)); } catch (IOException e) { throw new IllegalArgumentException( "PipelineOptions specified failed to serialize to JSON.", e); } newJob.setClientRequestId(requestId); DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String version = dataflowRunnerInfo.getVersion(); checkState( !"${pom.version}".equals(version), "Unable to submit a job to the Dataflow service with unset version ${pom.version}"); LOG.info("Dataflow SDK version: {}", version); newJob.getEnvironment().setUserAgent((Map) dataflowRunnerInfo.getProperties()); // The Dataflow Service may write to the temporary directory directly, so // must be verified. if (!isNullOrEmpty(options.getGcpTempLocation())) { newJob .getEnvironment() .setTempStoragePrefix( dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation())); } newJob.getEnvironment().setDataset(options.getTempDatasetId()); if (options.getWorkerRegion() != null) { newJob.getEnvironment().setWorkerRegion(options.getWorkerRegion()); } if (options.getWorkerZone() != null) { newJob.getEnvironment().setWorkerZone(options.getWorkerZone()); } if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.COST_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_COST_OPTIMIZED"); } else if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.SPEED_OPTIMIZED) { newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_SPEED_OPTIMIZED"); } // Represent the minCpuPlatform pipeline option as an experiment, if not already present. if (!isNullOrEmpty(dataflowOptions.getMinCpuPlatform())) { List<String> experiments = firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()); List<String> minCpuFlags = experiments.stream() .filter(p -> p.startsWith("min_cpu_platform")) .collect(Collectors.toList()); if (minCpuFlags.isEmpty()) { dataflowOptions.setExperiments( ImmutableList.<String>builder() .addAll(experiments) .add("min_cpu_platform=" + dataflowOptions.getMinCpuPlatform()) .build()); } else { LOG.warn( "Flag min_cpu_platform is defined in both top level PipelineOption, " + "as well as under experiments. Proceed using {}.", minCpuFlags.get(0)); } } newJob .getEnvironment() .setExperiments( ImmutableList.copyOf( firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList()))); // Set the Docker container image that executes Dataflow worker harness, residing in Google // Container Registry. Translator is guaranteed to create a worker pool prior to this point. // For runner_v1, only worker_harness_container is set. // For runner_v2, both worker_harness_container and sdk_harness_container are set to the same // value. String containerImage = getContainerImageForJob(options); for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) { workerPool.setWorkerHarnessContainerImage(containerImage); } configureSdkHarnessContainerImages(options, portablePipelineProto, newJob); newJob.getEnvironment().setVersion(getEnvironmentVersion(options)); if (hooks != null) { hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment()); } // enable upload_graph when the graph is too large byte[] jobGraphBytes = DataflowPipelineTranslator.jobToString(newJob).getBytes(UTF_8); int jobGraphByteSize = jobGraphBytes.length; if (jobGraphByteSize >= CREATE_JOB_REQUEST_LIMIT_BYTES && !hasExperiment(options, "upload_graph") && !useUnifiedWorker(options)) { List<String> experiments = firstNonNull(options.getExperiments(), Collections.emptyList()); options.setExperiments( ImmutableList.<String>builder().addAll(experiments).add("upload_graph").build()); LOG.info( "The job graph size ({} in bytes) is larger than {}. Automatically add " + "the upload_graph option to experiments.", jobGraphByteSize, CREATE_JOB_REQUEST_LIMIT_BYTES); } if (hasExperiment(options, "upload_graph") && useUnifiedWorker(options)) { ArrayList<String> experiments = new ArrayList<>(options.getExperiments()); while (experiments.remove("upload_graph")) {} options.setExperiments(experiments); LOG.warn( "The upload_graph experiment was specified, but it does not apply " + "to runner v2 jobs. Option has been automatically removed."); } // Upload the job to GCS and remove the graph object from the API call. The graph // will be downloaded from GCS by the service. if (hasExperiment(options, "upload_graph")) { DataflowPackage stagedGraph = options.getStager().stageToFile(jobGraphBytes, DATAFLOW_GRAPH_FILE_NAME); newJob.getSteps().clear(); newJob.setStepsLocation(stagedGraph.getLocation()); } if (!isNullOrEmpty(options.getDataflowJobFile()) || !isNullOrEmpty(options.getTemplateLocation())) { boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation()); if (isTemplate) { checkArgument( isNullOrEmpty(options.getDataflowJobFile()), "--dataflowJobFile and --templateLocation are mutually exclusive."); } String fileLocation = firstNonNull(options.getTemplateLocation(), options.getDataflowJobFile()); checkArgument( fileLocation.startsWith("/") || fileLocation.startsWith("gs://"), "Location must be local or on Cloud Storage, got %s.", fileLocation); ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */); String workSpecJson = DataflowPipelineTranslator.jobToString(newJob); try (PrintWriter printWriter = new PrintWriter( new BufferedWriter( new OutputStreamWriter( Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)), UTF_8)))) { printWriter.print(workSpecJson); LOG.info("Printed job specification to {}", fileLocation); } catch (IOException ex) { String error = String.format("Cannot create output file at %s", fileLocation); if (isTemplate) { throw new RuntimeException(error, ex); } else { LOG.warn(error, ex); } } if (isTemplate) { LOG.info("Template successfully created."); return new DataflowTemplateJob(); } } String jobIdToUpdate = null; if (options.isUpdate()) { jobIdToUpdate = getJobIdFromName(options.getJobName()); newJob.setTransformNameMapping(options.getTransformNameMapping()); newJob.setReplaceJobId(jobIdToUpdate); } if (options.getCreateFromSnapshot() != null && !options.getCreateFromSnapshot().isEmpty()) { newJob.setTransformNameMapping(options.getTransformNameMapping()); newJob.setCreatedFromSnapshotId(options.getCreateFromSnapshot()); } Job jobResult; try { jobResult = dataflowClient.createJob(newJob); } catch (GoogleJsonResponseException e) { String errorMessages = "Unexpected errors"; if (e.getDetails() != null) { if (jobGraphByteSize >= CREATE_JOB_REQUEST_LIMIT_BYTES) { errorMessages = "The size of the serialized JSON representation of the pipeline " + "exceeds the allowable limit. " + "For more information, please see the documentation on job submission:\n" + "https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#jobs"; } else { errorMessages = e.getDetails().getMessage(); } } throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e); } catch (IOException e) { throw new RuntimeException("Failed to create a workflow job", e); } // Use a raw client for post-launch monitoring, as status calls may fail // regularly and need not be retried automatically. DataflowPipelineJob dataflowPipelineJob = new DataflowPipelineJob( DataflowClient.create(options), jobResult.getId(), options, jobSpecification != null ? jobSpecification.getStepNames() : Collections.emptyMap(), portablePipelineProto); // If the service returned client request id, the SDK needs to compare it // with the original id generated in the request, if they are not the same // (i.e., the returned job is not created by this request), throw // DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException // depending on whether this is a reload or not. if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty() && !jobResult.getClientRequestId().equals(requestId)) { // If updating a job. if (options.isUpdate()) { throw new DataflowJobAlreadyUpdatedException( dataflowPipelineJob, String.format( "The job named %s with id: %s has already been updated into job id: %s " + "and cannot be updated again.", newJob.getName(), jobIdToUpdate, jobResult.getId())); } else { throw new DataflowJobAlreadyExistsException( dataflowPipelineJob, String.format( "There is already an active job named %s with id: %s. If you want to submit a" + " second job, try again by setting a different name using --jobName.", newJob.getName(), jobResult.getId())); } } LOG.info( "To access the Dataflow monitoring console, please navigate to {}", MonitoringUtil.getJobMonitoringPageURL( options.getProject(), options.getRegion(), jobResult.getId())); LOG.info("Submitted job: {}", jobResult.getId()); LOG.info( "To cancel the job using the 'gcloud' tool, run:\n> {}", MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId())); return dataflowPipelineJob; }
@Test public void testRun() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); Pipeline p = buildDataflowPipeline(options); DataflowPipelineJob job = (DataflowPipelineJob) p.run(); assertEquals("newid", job.getJobId()); ArgumentCaptor<Job> jobCaptor = ArgumentCaptor.forClass(Job.class); Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture()); assertValidJob(jobCaptor.getValue()); }
@SuppressWarnings("unchecked") public static Object castValue(Object inputValue, FieldType input, FieldType output) { TypeName inputType = input.getTypeName(); TypeName outputType = output.getTypeName(); if (inputValue == null) { return null; } switch (inputType) { case ROW: return castRow((Row) inputValue, input.getRowSchema(), output.getRowSchema()); case ARRAY: case ITERABLE: Iterable<Object> inputValues = (Iterable<Object>) inputValue; List<Object> outputValues = new ArrayList<>(Iterables.size(inputValues)); for (Object elem : inputValues) { outputValues.add( castValue(elem, input.getCollectionElementType(), output.getCollectionElementType())); } return outputValues; case MAP: Map<Object, Object> inputMap = (Map<Object, Object>) inputValue; Map<Object, Object> outputMap = Maps.newHashMapWithExpectedSize(inputMap.size()); for (Map.Entry<Object, Object> entry : inputMap.entrySet()) { Object outputKey = castValue(entry.getKey(), input.getMapKeyType(), output.getMapKeyType()); Object outputValue = castValue(entry.getValue(), input.getMapValueType(), output.getMapValueType()); outputMap.put(outputKey, outputValue); } return outputMap; default: if (inputType.equals(outputType)) { return inputValue; } if (inputType.isNumericType()) { return castNumber((Number) inputValue, inputType, outputType); } else { throw new IllegalArgumentException("input should be array, map, numeric or row"); } } }
@Test public void testCastArray() { Object output = Cast.castValue( Arrays.asList((short) 1, (short) 2, (short) 3), Schema.FieldType.array(Schema.FieldType.INT16), Schema.FieldType.array(Schema.FieldType.INT32)); assertEquals(Arrays.asList(1, 2, 3), output); }
@Override public GrokPattern save(GrokPattern pattern) throws ValidationException { try { if (!validate(pattern)) { throw new ValidationException("Invalid pattern " + pattern); } } catch (GrokException | PatternSyntaxException e) { throw new ValidationException("Invalid pattern " + pattern + "\n" + e.getMessage()); } if (loadByName(pattern.name()).isPresent()) { throw new ValidationException("Grok pattern " + pattern.name() + " already exists"); } final WriteResult<GrokPattern, ObjectId> result = dbCollection.save(pattern); final GrokPattern savedGrokPattern = result.getSavedObject(); clusterBus.post(GrokPatternsUpdatedEvent.create(ImmutableSet.of(savedGrokPattern.name()))); return savedGrokPattern; }
@Test public void saveSucceedsWithValidGrokPattern() throws ValidationException { service.save(GrokPattern.create("NUMBER", "[0-9]+")); verify(clusterEventBus, times(1)).post(any(GrokPatternsUpdatedEvent.class)); assertThat(collection.countDocuments()).isEqualTo(1L); }
public static long getDU(File dir) { long size = 0; if (!dir.exists()) return 0; if (!dir.isDirectory()) { return dir.length(); } else { File[] allFiles = dir.listFiles(); if (allFiles != null) { for (File f : allFiles) { if (!org.apache.commons.io.FileUtils.isSymlink(f)) { size += getDU(f); } } } return size; } }
@Test (timeout = 30000) public void testGetDU() throws Exception { long du = FileUtil.getDU(testFolder.getRoot()); // Only two files (in partitioned). Each has 3 characters + system-specific // line separator. final long expected = 2 * (3 + System.getProperty("line.separator").length()); Assert.assertEquals(expected, du); // target file does not exist: final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog"); long duDoesNotExist = FileUtil.getDU(doesNotExist); assertEquals(0, duDoesNotExist); // target file is not a directory: File notADirectory = new File(partitioned, "part-r-00000"); long duNotADirectoryActual = FileUtil.getDU(notADirectory); long duNotADirectoryExpected = 3 + System.getProperty("line.separator").length(); assertEquals(duNotADirectoryExpected, duNotADirectoryActual); try { // one of target files is not accessible, but the containing directory // is accessible: try { FileUtil.chmod(notADirectory.getAbsolutePath(), "0000"); } catch (InterruptedException ie) { // should never happen since that method never throws InterruptedException. assertNull(ie); } assertFalse(FileUtil.canRead(notADirectory)); final long du3 = FileUtil.getDU(partitioned); assertEquals(expected, du3); // some target files and containing directory are not accessible: try { FileUtil.chmod(partitioned.getAbsolutePath(), "0000"); } catch (InterruptedException ie) { // should never happen since that method never throws InterruptedException. assertNull(ie); } assertFalse(FileUtil.canRead(partitioned)); final long du4 = FileUtil.getDU(partitioned); assertEquals(0, du4); } finally { // Restore the permissions so that we can delete the folder // in @After method: FileUtil.chmod(partitioned.getAbsolutePath(), "0777", true/*recursive*/); } }
public void verifyState(HttpRequest request, @Nullable String csrfState, @Nullable String login) { if (!shouldRequestBeChecked(request)) { return; } String failureCause = checkCsrf(csrfState, request.getHeader(CSRF_HEADER)); if (failureCause != null) { throw AuthenticationException.newBuilder() .setSource(Source.local(Method.JWT)) .setLogin(login) .setMessage(failureCause) .build(); } }
@Test public void verify_state() { mockRequestCsrf(CSRF_STATE); mockPostJavaWsRequest(); underTest.verifyState(request, CSRF_STATE, LOGIN); }
public static void smooth(PointList geometry, double maxWindowSize) { if (geometry.size() <= 2) { // geometry consists only of tower nodes, there are no pillar nodes to be smoothed in between return; } // calculate the distance between all points once here to avoid repeated calculation. // for n nodes there are always n-1 edges double[] distances = new double[geometry.size() - 1]; for (int i = 0; i <= geometry.size() - 2; i++) { distances[i] = DistancePlaneProjection.DIST_PLANE.calcDist( geometry.getLat(i), geometry.getLon(i), geometry.getLat(i + 1), geometry.getLon(i + 1) ); } // map that will collect all smoothed elevation values, size is less by 2 // because elevation of start and end point (tower nodes) won't be touched IntDoubleHashMap averagedElevations = new IntDoubleHashMap((geometry.size() - 1) * 4 / 3); // iterate over every pillar node to smooth its elevation // first and last points are left out as they are tower nodes for (int i = 1; i <= geometry.size() - 2; i++) { // first, determine the average window which could be smaller when close to pillar nodes double searchDistance = maxWindowSize / 2.0; double searchDistanceBack = 0.0; for (int j = i - 1; j >= 0; j--) { searchDistanceBack += distances[j]; if (searchDistanceBack > searchDistance) { break; } } // update search distance if pillar node is close to START tower node searchDistance = Math.min(searchDistance, searchDistanceBack); double searchDistanceForward = 0.0; for (int j = i; j < geometry.size() - 1; j++) { searchDistanceForward += distances[j]; if (searchDistanceForward > searchDistance) { break; } } // update search distance if pillar node is close to END tower node searchDistance = Math.min(searchDistance, searchDistanceForward); if (searchDistance <= 0.0) { // there is nothing to smooth. this is an edge case where pillar nodes share exactly the same location // as a tower node. // by doing so we avoid (at least theoretically) a division by zero later in the function call continue; } // area under elevation curve double elevationArea = 0.0; // first going again backwards double distanceBack = 0.0; for (int j = i - 1; j >= 0; j--) { double dist = distances[j]; double searchDistLeft = searchDistance - distanceBack; distanceBack += dist; if (searchDistLeft < dist) { // node lies outside averaging window double elevationDelta = geometry.getEle(j) - geometry.getEle(j + 1); double elevationAtSearchDistance = geometry.getEle(j + 1) + searchDistLeft / dist * elevationDelta; elevationArea += searchDistLeft * (geometry.getEle(j + 1) + elevationAtSearchDistance) / 2.0; break; } else { elevationArea += dist * (geometry.getEle(j + 1) + geometry.getEle(j)) / 2.0; } } // now going forward double distanceForward = 0.0; for (int j = i; j < geometry.size() - 1; j++) { double dist = distances[j]; double searchDistLeft = searchDistance - distanceForward; distanceForward += dist; if (searchDistLeft < dist) { double elevationDelta = geometry.getEle(j + 1) - geometry.getEle(j); double elevationAtSearchDistance = geometry.getEle(j) + searchDistLeft / dist * elevationDelta; elevationArea += searchDistLeft * (geometry.getEle(j) + elevationAtSearchDistance) / 2.0; break; } else { elevationArea += dist * (geometry.getEle(j + 1) + geometry.getEle(j)) / 2.0; } } double elevationAverage = elevationArea / (searchDistance * 2); averagedElevations.put(i, elevationAverage); } // after all pillar nodes got an averaged elevation, elevations are overwritten averagedElevations.forEach((Consumer<IntDoubleCursor>) c -> geometry.setElevation(c.key, c.value)); }
@Test public void testDenseWay() { // distance: 100m, 50m, 50m, 100m PointList pl = new PointList(5, true); pl.add(47.32763157186426, 10.158549243021412, 30); pl.add(47.32846770417248, 10.159039980808643, 20); pl.add(47.32891933217678, 10.159062491716355, 0); pl.add(47.32935875031157, 10.159197557162912, 200); pl.add(47.330136877623886, 10.159850373485142, 300); EdgeElevationSmoothingMovingAverage.smooth(pl, 150.0); assertEquals(5, pl.size()); assertEquals(30, pl.getEle(0), 0.000001); assertEquals(((27.5 + 20.0) / 2.0 * 75.0 + 10 * 50.24 + 50 * 24.7) / 150.0, pl.getEle(1), 0.1); assertEquals(((22.5 + 20.0) / 2.0 * 25.0 + 10 * 50.24 + 50 * 100 + 25 * 212.5) / 150.0, pl.getEle(2), 0.1); assertEquals((5 * 25 + 50 * 100 + 75 * 237.5) / 150.0, pl.getEle(3), 1); assertEquals(300, pl.getEle(4), 0.000001); }
@SafeVarargs public static <T> Set<T> createSetContaining(T... contents) { return new HashSet<>(Arrays.asList(contents)); }
@Test public void testCreateSetContaining() { Set<String> set = CollectionHelper.createSetContaining("foo", "bar", "baz"); assertEquals(3, set.size()); assertTrue(set.contains("foo")); assertTrue(set.contains("bar")); assertTrue(set.contains("baz")); }
public static Expression generateFilterExpression(SearchArgument sarg) { return translate(sarg.getExpression(), sarg.getLeaves()); }
@Test public void testStringType() { SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); SearchArgument arg = builder.startAnd().equals("string", PredicateLeaf.Type.STRING, "Joe").end().build(); UnboundPredicate expected = Expressions.equal("string", "Joe"); UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg); assertPredicatesMatch(expected, actual); }
public static <T> T createNewClassInstance(Class<T> cls, Class<?>[] ctorClassArgs, Object[] ctorArgs) { try { if (ctorClassArgs == null) { return cls.newInstance(); } Constructor<T> ctor = cls.getConstructor(ctorClassArgs); return ctor.newInstance(ctorArgs); } catch (InvocationTargetException e) { throw new RuntimeException(e.getCause()); } catch (ReflectiveOperationException e) { throw new RuntimeException(e); } }
@Test public void createNewClassInstance() { class TestCase { Class<?> mCls; Class<?>[] mCtorClassArgs; Object[] mCtorArgs; String mExpected; public TestCase(String expected, Class<?> cls, Class<?>[] ctorClassArgs, Object... ctorArgs) { mCls = cls; mCtorClassArgs = ctorClassArgs; mCtorArgs = ctorArgs; mExpected = expected; } } List<TestCase> testCases = new ArrayList<>(); testCases.add(new TestCase("hello", TestClassA.class, null)); testCases.add(new TestCase("1", TestClassB.class, new Class[] {int.class}, 1)); for (TestCase testCase : testCases) { Object o = CommonUtils.createNewClassInstance(testCase.mCls, testCase.mCtorClassArgs, testCase.mCtorArgs); assertEquals(o.toString(), testCase.mExpected); } }
public static Object convert(final Object o) { if (o == null) { return RubyUtil.RUBY.getNil(); } final Class<?> cls = o.getClass(); final Valuefier.Converter converter = CONVERTER_MAP.get(cls); if (converter != null) { return converter.convert(o); } return fallbackConvert(o, cls); }
@Test public void testJodaDateTIme() { DateTime jo = DateTime.now(); Object result = Valuefier.convert(jo); assertEquals(JrubyTimestampExtLibrary.RubyTimestamp.class, result.getClass()); }
@Nullable public String getLeadingPath(int n) { String path = mUri.getPath(); if (n == 0 && path.indexOf(AlluxioURI.SEPARATOR) == 0) { // the special case return AlluxioURI.SEPARATOR; } int depth = getDepth(); if (depth < n) { return null; } else if (depth == n) { return path; } else { String[] comp = path.split(SEPARATOR); return StringUtils.join(Arrays.asList(comp).subList(0, n + 1), SEPARATOR); } }
@Test public void getLeadingPath() { assertEquals("/", new AlluxioURI("/a/b/c/").getLeadingPath(0)); assertEquals("/a", new AlluxioURI("/a/b/c/").getLeadingPath(1)); assertEquals("/a/b", new AlluxioURI("/a/b/c/").getLeadingPath(2)); assertEquals("/a/b/c", new AlluxioURI("/a/b/c/").getLeadingPath(3)); assertEquals(null, new AlluxioURI("/a/b/c/").getLeadingPath(4)); assertEquals("/", new AlluxioURI("/").getLeadingPath(0)); assertEquals("", new AlluxioURI("").getLeadingPath(0)); assertEquals(null, new AlluxioURI("").getLeadingPath(1)); assertEquals(".", new AlluxioURI(".").getLeadingPath(0)); assertEquals(null, new AlluxioURI(".").getLeadingPath(1)); assertEquals("a/b", new AlluxioURI("a/b/c").getLeadingPath(1)); }
@Override public void close() throws UnavailableException { // JournalContext is closed before block deletion context so that file system master changes // are written before block master changes. If a failure occurs between deleting an inode and // remove its blocks, it's better to have an orphaned block than an inode with a missing block. closeQuietly(mJournalContext); closeQuietly(mBlockDeletionContext); if (mThrown != null) { Throwables.propagateIfPossible(mThrown, UnavailableException.class); throw new RuntimeException(mThrown); } }
@Test public void journalContextThrows() throws Throwable { Exception jcException = new UnavailableException("journal context exception"); doThrow(jcException).when(mMockJC).close(); checkClose(jcException); }
public URL getInterNodeListener( final Function<URL, Integer> portResolver ) { return getInterNodeListener(portResolver, LOGGER); }
@Test public void shouldResolveInterNodeListenerToFirstListenerSetToResolvableHost() { // Given: final URL expected = url("https://example.com:12345"); final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") .put(LISTENERS_CONFIG, expected.toString() + ",http://localhost:2589") .build() ); // When: final URL actual = config.getInterNodeListener(portResolver, logger); // Then: assertThat(actual, is(expected)); verifyLogsInterNodeListener(expected, QUOTED_FIRST_LISTENER_CONFIG); verifyNoMoreInteractions(logger); }
public double getNormalizedEditDistance(String source, String target) { ImmutableList<String> sourceTerms = NamingConventions.splitToLowercaseTerms(source); ImmutableList<String> targetTerms = NamingConventions.splitToLowercaseTerms(target); // costMatrix[s][t] is the edit distance between source term s and target term t double[][] costMatrix = sourceTerms.stream() .map(s -> targetTerms.stream().mapToDouble(t -> editDistanceFn.apply(s, t)).toArray()) .toArray(double[][]::new); // worstCaseMatrix[s][t] is the worst case distance between source term s and target term t double[][] worstCaseMatrix = sourceTerms.stream() .map(s -> s.length()) .map( s -> targetTerms.stream() .map(t -> t.length()) .mapToDouble(t -> maxDistanceFn.apply(s, t)) .toArray()) .toArray(double[][]::new); double[] sourceTermDeletionCosts = sourceTerms.stream().mapToDouble(s -> maxDistanceFn.apply(s.length(), 0)).toArray(); double[] targetTermAdditionCosts = targetTerms.stream().mapToDouble(s -> maxDistanceFn.apply(0, s.length())).toArray(); // this is an array of assignments of source terms to target terms. If assignments[i] contains // the value j this means that source term i has been assigned to target term j // There will be one entry in cost for each source term: // - If there are more source terms than target terms then some will be unassigned - value -1 // - If there are a fewer source terms than target terms then some target terms will not be // referenced in the array int[] assignments = new HungarianAlgorithm(costMatrix).execute(); double assignmentCost = computeCost(assignments, costMatrix, sourceTermDeletionCosts, targetTermAdditionCosts); double maxCost = computeCost(assignments, worstCaseMatrix, sourceTermDeletionCosts, targetTermAdditionCosts); return assignmentCost / maxCost; }
@Test public void getNormalizedEditDistance_returnsNoMatch_withDifferentTerms() { TermEditDistance termEditDistance = new TermEditDistance((s, t) -> s.equals(t) ? 0.0 : 1.0, (s, t) -> 1.0); String sourceIdentifier = "fooBar"; String targetIdentifier = "bazQux"; double distance = termEditDistance.getNormalizedEditDistance(sourceIdentifier, targetIdentifier); assertThat(distance).isEqualTo(1.0); }
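Worked arithmetic for this test: splitToLowercaseTerms yields [foo, bar] and [baz, qux]; with the unit cost functions every term pair costs 1.0, so the optimal assignment costs 2.0 while the worst case is also 2.0, giving 2.0 / 2.0 = 1.0. A hedged companion case showing a partial match under the same costs (the second identifier is an illustrative input, not from the test):

// foo->foo costs 0 and bar->qux costs 1; the worst case is 1 + 1 = 2, so the result is 0.5.
TermEditDistance ted = new TermEditDistance((s, t) -> s.equals(t) ? 0.0 : 1.0, (s, t) -> 1.0);
double partial = ted.getNormalizedEditDistance("fooBar", "fooQux"); // 0.5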
@Override public Map<String, String> getLabels() { final Map<String, String> labels = new HashMap<>(); labels.putAll( flinkConfig .getOptional(KubernetesConfigOptions.JOB_MANAGER_LABELS) .orElse(Collections.emptyMap())); labels.putAll(getSelectors()); return Collections.unmodifiableMap(labels); }
@Test void testPrioritizeBuiltInLabels() { final Map<String, String> userLabels = new HashMap<>(); userLabels.put(Constants.LABEL_TYPE_KEY, "user-label-type"); userLabels.put(Constants.LABEL_APP_KEY, "user-label-app"); userLabels.put(Constants.LABEL_COMPONENT_KEY, "user-label-component-jm"); flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_LABELS, userLabels); final Map<String, String> expectedLabels = new HashMap<>(getCommonLabels()); expectedLabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_JOB_MANAGER); assertThat(kubernetesJobManagerParameters.getLabels()).isEqualTo(expectedLabels); }
@Override public Validation validate(Validation val) { if (StringUtils.isBlank(systemEnvironment.getPropertyImpl("jetty.home"))) { systemEnvironment.setProperty("jetty.home", systemEnvironment.getPropertyImpl("user.dir")); } systemEnvironment.setProperty("jetty.base", systemEnvironment.getPropertyImpl("jetty.home")); File home = new File(systemEnvironment.getPropertyImpl("jetty.home")); File work = new File(systemEnvironment.getPropertyImpl("jetty.home"), "work"); if (home.exists()) { if (work.exists()) { try { FileUtils.deleteDirectory(work); } catch (IOException e) { String message = format("Error trying to remove Jetty working directory {0}: {1}", work.getAbsolutePath(), e); return val.addError(new RuntimeException(message)); } } work.mkdir(); } return Validation.SUCCESS; }
@Test public void shouldSetJettyHomeAndBasePropertyIfItsNotSet() { when(systemEnvironment.getPropertyImpl("jetty.home")).thenReturn(""); when(systemEnvironment.getPropertyImpl("user.dir")).thenReturn("junk"); Validation val = new Validation(); jettyWorkDirValidator.validate(val); assertThat(val.isSuccessful(), is(true)); verify(systemEnvironment).getPropertyImpl("user.dir"); verify(systemEnvironment).setProperty("jetty.home", "junk"); }
@Override public Num calculate(BarSeries series, Position position) { return position.hasProfit() ? series.one() : series.zero(); }
@Test public void calculateWithOneLongPosition() { MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105); Position position = new Position(Trade.buyAt(0, series), Trade.sellAt(2, series)); assertNumEquals(1, getCriterion().calculate(series, position)); }
@Nonnull @Override public Sketch<IntegerSummary> getResult() { return unionAll(); }
@Test public void testThresholdBehavior() { IntegerSketch input1 = new IntegerSketch(_lgK, IntegerSummary.Mode.Sum); IntStream.range(0, 1000).forEach(i -> input1.update(i, 1)); CompactSketch<IntegerSummary> sketch1 = input1.compact(); IntegerSketch input2 = new IntegerSketch(_lgK, IntegerSummary.Mode.Sum); IntStream.range(1000, 2000).forEach(i -> input2.update(i, 1)); CompactSketch<IntegerSummary> sketch2 = input2.compact(); TupleIntSketchAccumulator accumulator = new TupleIntSketchAccumulator(_setOps, _nominalEntries, 3); accumulator.apply(sketch1); accumulator.apply(sketch2); Assert.assertEquals(accumulator.getResult().getEstimate(), sketch1.getEstimate() + sketch2.getEstimate()); }
@SneakyThrows({InterruptedException.class, ExecutionException.class}) @Override public List<String> getChildrenKeys(final String key) { String prefix = key + PATH_SEPARATOR; ByteSequence prefixByteSequence = ByteSequence.from(prefix, StandardCharsets.UTF_8); GetOption getOption = GetOption.newBuilder().isPrefix(true).withSortField(GetOption.SortTarget.KEY).withSortOrder(GetOption.SortOrder.ASCEND).build(); List<KeyValue> keyValues = client.getKVClient().get(prefixByteSequence, getOption).get().getKvs(); return keyValues.stream().map(each -> getSubNodeKeyName(prefix, each.getKey().toString(StandardCharsets.UTF_8))).distinct().collect(Collectors.toList()); }
@Test void assertGetChildrenKeysWhenThrowExecutionException() throws ExecutionException, InterruptedException { doThrow(ExecutionException.class).when(getFuture).get(); try { repository.getChildrenKeys("/key/key1"); // CHECKSTYLE:OFF } catch (final Exception ex) { // CHECKSTYLE:ON assertThat(ex, instanceOf(ExecutionException.class)); } }
@Override public ShardingStrategyConfiguration swapToObject(final YamlShardingStrategyConfiguration yamlConfig) { int shardingStrategyConfigCount = 0; ShardingStrategyConfiguration result = null; if (null != yamlConfig.getStandard()) { shardingStrategyConfigCount++; result = createStandardShardingStrategyConfiguration(yamlConfig.getStandard()); } if (null != yamlConfig.getComplex()) { shardingStrategyConfigCount++; result = createComplexShardingStrategyConfiguration(yamlConfig.getComplex()); } if (null != yamlConfig.getHint()) { shardingStrategyConfigCount++; result = createHintShardingStrategyConfiguration(yamlConfig.getHint()); } if (null != yamlConfig.getNone()) { shardingStrategyConfigCount++; result = new NoneShardingStrategyConfiguration(); } ShardingSpherePreconditions.checkState(shardingStrategyConfigCount <= 1, InvalidShardingStrategyConfigCountException::new); return result; }
@Test void assertSwapToObjectForNoneShardingStrategy() { YamlShardingStrategyConfiguration yamlConfig = new YamlShardingStrategyConfiguration(); yamlConfig.setNone(new YamlNoneShardingStrategyConfiguration()); YamlShardingStrategyConfigurationSwapper swapper = new YamlShardingStrategyConfigurationSwapper(); ShardingStrategyConfiguration actual = swapper.swapToObject(yamlConfig); assertThat(actual, instanceOf(NoneShardingStrategyConfiguration.class)); }
@Override public TempFileSpace newSpace(final String subdirectoryPrefix) { // TODO: Accept only ISO 8601-style timestamp in the v0.10 series. if (!ISO8601_BASIC_PATTERN.matcher(subdirectoryPrefix).matches()) { logger.warn("TempFileSpaceAllocator#newSpace should be called with ISO 8601 basic format: {}", subdirectoryPrefix); } // It is originally intended to support multiple files/directories, but the reasons are missing. // https://github.com/embulk/embulk/commit/a7643573ecb39e6dd71a08edce77c8e64dc70a77 // https://github.com/embulk/embulk/commit/5a78270a4fc20e3c113c68e4c0f6c66c1bd45886 // UNIX/Linux cannot include '/' as file name. // Windows cannot include ':' as file name. try { return TempFileSpaceImpl.with( this.tempDirectoryBase, "embulk" + subdirectoryPrefix.replace('/', '-').replace(':', '-')); } catch (final IOException ex) { throw new UncheckedIOException(ex); } }
@Test public void testNewSpaceWithIso8601Basic() { final TempFileSpaceAllocator allocator = new SimpleTempFileSpaceAllocator(); allocator.newSpace("20191031T123456Z"); }
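The replace calls in the focal method exist because ':' is illegal in Windows file names and '/' in UNIX file names. A small illustration — the extended-format input is illustrative; the test itself uses the basic format, which needs no sanitizing:

String basic = "embulk" + "20191031T123456Z".replace('/', '-').replace(':', '-');
System.out.println(basic); // embulk20191031T123456Z (unchanged)
String extended = "embulk" + "2019-10-31T12:34:56Z".replace('/', '-').replace(':', '-');
System.out.println(extended); // embulk2019-10-31T12-34-56Z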
public synchronized int sendFetches() { final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests(); sendFetchesInternal( fetchRequests, (fetchTarget, data, clientResponse) -> { synchronized (Fetcher.this) { handleFetchSuccess(fetchTarget, data, clientResponse); } }, (fetchTarget, data, error) -> { synchronized (Fetcher.this) { handleFetchFailure(fetchTarget, data, error); } }); return fetchRequests.size(); }
@Test public void testSubscriptionPositionUpdatedWithEpoch() { // Create some records that include a leader epoch (1) MemoryRecordsBuilder builder = MemoryRecords.builder( ByteBuffer.allocate(1024), RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE, TimestampType.CREATE_TIME, 0L, RecordBatch.NO_TIMESTAMP, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE, false, 1 ); builder.appendWithOffset(0L, 0L, "key".getBytes(), "value-1".getBytes()); builder.appendWithOffset(1L, 0L, "key".getBytes(), "value-2".getBytes()); builder.appendWithOffset(2L, 0L, "key".getBytes(), "value-3".getBytes()); MemoryRecords records = builder.build(); buildFetcher(); assignFromUser(singleton(tp0)); // Initialize the epoch=1 Map<String, Integer> partitionCounts = new HashMap<>(); partitionCounts.put(tp0.topic(), 4); MetadataResponse metadataResponse = RequestTestUtils.metadataUpdateWithIds("dummy", 1, Collections.emptyMap(), partitionCounts, tp -> 1, topicIds); metadata.updateWithCurrentRequestVersion(metadataResponse, false, 0L); // Seek subscriptions.seek(tp0, 0); // Do a normal fetch assertEquals(1, sendFetches()); assertFalse(fetcher.hasCompletedFetches()); client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0)); consumerClient.pollNoWakeup(); assertTrue(fetcher.hasCompletedFetches()); Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetchRecords(); assertTrue(partitionRecords.containsKey(tp0)); assertEquals(subscriptions.position(tp0).offset, 3L); assertOptional(subscriptions.position(tp0).offsetEpoch, value -> assertEquals(value.intValue(), 1)); }
public static Date parseHttpDate(CharSequence txt) { return parseHttpDate(txt, 0, txt.length()); }
@Test public void testParseAllMonths() { assertEquals(Calendar.JANUARY, getMonth(parseHttpDate("Sun, 06 Jan 1994 08:49:37 GMT"))); assertEquals(Calendar.FEBRUARY, getMonth(parseHttpDate("Sun, 06 Feb 1994 08:49:37 GMT"))); assertEquals(Calendar.MARCH, getMonth(parseHttpDate("Sun, 06 Mar 1994 08:49:37 GMT"))); assertEquals(Calendar.APRIL, getMonth(parseHttpDate("Sun, 06 Apr 1994 08:49:37 GMT"))); assertEquals(Calendar.MAY, getMonth(parseHttpDate("Sun, 06 May 1994 08:49:37 GMT"))); assertEquals(Calendar.JUNE, getMonth(parseHttpDate("Sun, 06 Jun 1994 08:49:37 GMT"))); assertEquals(Calendar.JULY, getMonth(parseHttpDate("Sun, 06 Jul 1994 08:49:37 GMT"))); assertEquals(Calendar.AUGUST, getMonth(parseHttpDate("Sun, 06 Aug 1994 08:49:37 GMT"))); assertEquals(Calendar.SEPTEMBER, getMonth(parseHttpDate("Sun, 06 Sep 1994 08:49:37 GMT"))); assertEquals(Calendar.OCTOBER, getMonth(parseHttpDate("Sun Oct 06 08:49:37 1994"))); assertEquals(Calendar.NOVEMBER, getMonth(parseHttpDate("Sun Nov 06 08:49:37 1994"))); assertEquals(Calendar.DECEMBER, getMonth(parseHttpDate("Sun Dec 06 08:49:37 1994"))); }
public CompletableFuture<LookupTopicResult> getBroker(TopicName topicName) {
    long startTime = System.nanoTime();
    final MutableObject<CompletableFuture<LookupTopicResult>> newFutureCreated = new MutableObject<>();
    try {
        return lookupInProgress.computeIfAbsent(topicName, tpName -> {
            CompletableFuture<LookupTopicResult> newFuture =
                    findBroker(serviceNameResolver.resolveHost(), false, topicName, 0);
            newFutureCreated.setValue(newFuture);
            newFuture.thenRun(() -> histoGetBroker.recordSuccess(System.nanoTime() - startTime))
                    .exceptionally(x -> {
                        histoGetBroker.recordFailure(System.nanoTime() - startTime);
                        return null;
                    });
            return newFuture;
        });
    } finally {
        if (newFutureCreated.getValue() != null) {
            newFutureCreated.getValue().whenComplete((v, ex) ->
                    lookupInProgress.remove(topicName, newFutureCreated.getValue()));
        }
    }
}
@Test(invocationTimeOut = 3000)
public void maxLookupRedirectsTest1() throws Exception {
    LookupTopicResult lookupResult = lookup.getBroker(topicName).get();
    assertEquals(lookupResult.getLogicalAddress(),
            InetSocketAddress.createUnresolved("broker2.pulsar.apache.org", 6650));
    assertEquals(lookupResult.getPhysicalAddress(),
            InetSocketAddress.createUnresolved("broker2.pulsar.apache.org", 6650));
    assertFalse(lookupResult.isUseProxy());
}
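The concurrency pattern in the focal method is worth isolating: computeIfAbsent guarantees one in-flight lookup per topic, and the finally block arranges removal of exactly the future that was stored. A minimal generic sketch of the same pattern, with hypothetical names:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

public class InFlightCache<K, V> {
    private final ConcurrentMap<K, CompletableFuture<V>> inProgress = new ConcurrentHashMap<>();

    public CompletableFuture<V> get(K key, Function<K, CompletableFuture<V>> loader) {
        CompletableFuture<V> shared = inProgress.computeIfAbsent(key, loader);
        // The two-argument remove evicts only the exact future we stored, so a stale
        // completion callback cannot drop a newer in-flight lookup for the same key.
        shared.whenComplete((v, ex) -> inProgress.remove(key, shared));
        return shared;
    }
}

Unlike the focal method, this sketch registers the removal callback on every call rather than only when a new future was created; the two-argument remove keeps that safe, at the cost of redundant callbacks.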
public static SnapshotRef fromJson(String json) {
    Preconditions.checkArgument(
            json != null && !json.isEmpty(), "Cannot parse snapshot ref from invalid JSON: %s", json);
    return JsonUtil.parse(json, SnapshotRefParser::fromJson);
}
@Test
public void testFailParsingWhenNullOrEmptyJson() {
    String nullJson = null;
    assertThatThrownBy(() -> SnapshotRefParser.fromJson(nullJson))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageStartingWith("Cannot parse snapshot ref from invalid JSON");

    String emptyJson = "";
    assertThatThrownBy(() -> SnapshotRefParser.fromJson(emptyJson))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageStartingWith("Cannot parse snapshot ref from invalid JSON");
}
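For contrast with the failure cases above, a hedged example of a well-formed input (the field names follow the Iceberg table-spec ref format and are an assumption here, not taken from the test):

// Assumed field names per the Iceberg spec: a branch ref pointing at snapshot 1.
SnapshotRef branch = SnapshotRefParser.fromJson("{\"snapshot-id\": 1, \"type\": \"branch\"}");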
public static boolean codeAwareEqualsIgnoreSpaces(String in1, String in2) {
    if (in1 == null || in2 == null) {
        return in1 == null && in2 == null;
    }
    if (in1.isEmpty() && in2.isEmpty()) {
        return true;
    }
    if (in1.length() == 0) {
        in1 = " ";
    }
    if (in2.length() == 0) {
        in2 = " ";
    }

    int idx1 = 0;
    Character quoted1 = null;
    int idx2 = 0;
    Character quoted2 = null;
    boolean equals = true;
    while (equals) {
        // Skip whitespace, but only while outside of a quoted string literal.
        while (idx1 < in1.length() && quoted1 == null && isWhitespace(in1.charAt(idx1))) {
            idx1++;
        }
        while (idx2 < in2.length() && quoted2 == null && isWhitespace(in2.charAt(idx2))) {
            idx2++;
        }
        if (idx1 >= in1.length() || idx2 >= in2.length()) {
            // Considered equal only if the comparison succeeded so far and both
            // indexes reached the end of their respective strings.
            equals = equals && idx1 == in1.length() && idx2 == in2.length();
            break;
        }
        if (in1.charAt(idx1) == '"' || in1.charAt(idx1) == '\'') {
            if (quoted1 == null) {
                quoted1 = in1.charAt(idx1);
            } else if (quoted1.equals(in1.charAt(idx1)) && in1.charAt(idx1 - 1) != '\\') {
                // A matching, unescaped quote closes the literal.
                quoted1 = null;
            }
        }
        if (in2.charAt(idx2) == '"' || in2.charAt(idx2) == '\'') {
            if (quoted2 == null) {
                quoted2 = in2.charAt(idx2);
            } else if (quoted2.equals(in2.charAt(idx2)) && in2.charAt(idx2 - 1) != '\\') {
                quoted2 = null;
            }
        }
        equals &= in1.charAt(idx1) == in2.charAt(idx2);
        idx1++;
        idx2++;
    }
    return equals;
}
@Test
public void test_codeAwareEqualsIgnoreSpaces() {
    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(null, null)).isTrue();
    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces("", "")).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces("", null)).isFalse();
    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(null, "")).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(" ", "")).isTrue();
    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces("", " ")).isTrue();
    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(" ", " ")).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "rule Rx when then end",
            " rule Rx  when then end " // <<- DIFF 3x
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "rule Rx when then end\n",
            " rule Rx  when then end\n " // <<- DIFF, both terminate with whitespace but different types
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n"
    )).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n " + // <<- DIFF
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n"
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n",
            " package org.drools.compiler\n" + // <<- DIFF (at beginning of this line)
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n"
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n",
            " package org.drools.compiler\n " + // <<- DIFF 2x
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n " // <<- DIFF
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )  \n" + // <<- DIFF (whitespace outside the quotes)
                    "then\n" +
                    "end\n"
    )).isTrue();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello World\" )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello  World\" )\n" + // <<- DIFF (whitespace inside the quotes)
                    "then\n" +
                    "end\n"
    )).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello' World\" )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == \"Hello'  World\" )\n" + // <<- DIFF (inside the quotes)
                    "then\n" +
                    "end\n"
    )).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello World' )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello  World' )\n" + // <<- DIFF (inside the quotes)
                    "then\n" +
                    "end\n"
    )).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello\" World' )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello\"  World' )\n" + // <<- DIFF (inside the quotes)
                    "then\n" +
                    "end\n"
    )).isFalse();

    assertThat(StringUtils.codeAwareEqualsIgnoreSpaces(
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello\\' World' )\n" +
                    "then\n" +
                    "end\n",
            "package org.drools.compiler\n" +
                    "rule Rx when\n" +
                    "   $m : Message( message == 'Hello\\'  World' )\n" + // <<- DIFF (inside the quotes)
                    "then\n" +
                    "end\n"
    )).isFalse();
}
@Override
public List<Intent> compile(MultiPointToSinglePointIntent intent, List<Intent> installable) {
    Map<DeviceId, Link> links = new HashMap<>();
    ConnectPoint egressPoint = intent.egressPoint();

    final boolean allowMissingPaths = intentAllowsPartialFailure(intent);
    boolean hasPaths = false;
    boolean missingSomePaths = false;

    for (ConnectPoint ingressPoint : intent.ingressPoints()) {
        if (ingressPoint.deviceId().equals(egressPoint.deviceId())) {
            if (deviceService.isAvailable(ingressPoint.deviceId())) {
                hasPaths = true;
            } else {
                missingSomePaths = true;
            }
            continue;
        }

        Path path = getPath(intent, ingressPoint.deviceId(), egressPoint.deviceId());
        if (path != null) {
            hasPaths = true;

            for (Link link : path.links()) {
                if (links.containsKey(link.dst().deviceId())) {
                    // We've already reached the existing tree with the first
                    // part of this path. Add the merging point with different
                    // incoming port, but don't add the remainder of the path
                    // in case it differs from the path we already have.
                    links.put(link.src().deviceId(), link);
                    break;
                }
                links.put(link.src().deviceId(), link);
            }
        } else {
            missingSomePaths = true;
        }
    }

    // Allocate bandwidth on existing paths if a bandwidth constraint is set
    List<ConnectPoint> ingressCPs = intent.filteredIngressPoints().stream()
            .map(fcp -> fcp.connectPoint())
            .collect(Collectors.toList());
    ConnectPoint egressCP = intent.filteredEgressPoint().connectPoint();

    List<ConnectPoint> pathCPs = links.values().stream()
            .flatMap(l -> Stream.of(l.src(), l.dst()))
            .collect(Collectors.toList());
    pathCPs.addAll(ingressCPs);
    pathCPs.add(egressCP);

    allocateBandwidth(intent, pathCPs);

    if (!hasPaths) {
        throw new IntentException("Cannot find any path between ingress and egress points.");
    } else if (!allowMissingPaths && missingSomePaths) {
        throw new IntentException("Missing some paths between ingress and egress points.");
    }

    Intent result = LinkCollectionIntent.builder()
            .appId(intent.appId())
            .key(intent.key())
            .treatment(intent.treatment())
            .selector(intent.selector())
            .links(Sets.newHashSet(links.values()))
            .filteredIngressPoints(intent.filteredIngressPoints())
            .filteredEgressPoints(ImmutableSet.of(intent.filteredEgressPoint()))
            .priority(intent.priority())
            .constraints(intent.constraints())
            .resourceGroup(intent.resourceGroup())
            .build();

    return Collections.singletonList(result);
}
@Test
public void testSameDeviceCompilation() {
    Set<FilteredConnectPoint> ingress =
            Sets.newHashSet(new FilteredConnectPoint(new ConnectPoint(DID_1, PORT_1)),
                            new FilteredConnectPoint(new ConnectPoint(DID_1, PORT_2)));
    FilteredConnectPoint egress = new FilteredConnectPoint(new ConnectPoint(DID_1, PORT_3));

    MultiPointToSinglePointIntent intent = makeIntent(ingress, egress);
    assertThat(intent, is(notNullValue()));

    final String[] hops = {};
    MultiPointToSinglePointIntentCompiler compiler = makeCompiler(hops);
    assertThat(compiler, is(notNullValue()));

    List<Intent> result = compiler.compile(intent, null);
    assertThat(result, is(notNullValue()));
    assertThat(result, hasSize(1));

    Intent resultIntent = result.get(0);
    assertThat(resultIntent, instanceOf(LinkCollectionIntent.class));

    if (resultIntent instanceof LinkCollectionIntent) {
        LinkCollectionIntent linkIntent = (LinkCollectionIntent) resultIntent;
        assertThat(linkIntent.links(), hasSize(0));
    }
    assertThat("key is inherited", resultIntent.key(), is(intent.key()));
}
static double toDouble(final JsonNode object) {
    if (object instanceof NumericNode) {
        return object.doubleValue();
    }
    if (object instanceof TextNode) {
        try {
            return Double.parseDouble(object.textValue());
        } catch (final NumberFormatException e) {
            throw failedStringCoercionException(SqlBaseType.DOUBLE);
        }
    }
    throw invalidConversionException(object, SqlBaseType.DOUBLE);
}
@Test
public void shouldConvertLongToDoubleCorrectly() {
    final Double d = JsonSerdeUtils.toDouble(JsonNodeFactory.instance.numberNode(1L));
    assertThat(d, equalTo(1.0));
}
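The coercion has two accepted input shapes, numeric nodes and parseable text nodes; a short sketch of both (toDouble is package-private, so this assumes a caller in the same package, as in the test):

double fromNumber = JsonSerdeUtils.toDouble(JsonNodeFactory.instance.numberNode(1L));  // 1.0, read directly
double fromText = JsonSerdeUtils.toDouble(JsonNodeFactory.instance.textNode("2.5"));   // 2.5, via Double.parseDouble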
@Override
public List<String> listPartitionColumns(Connection connection, String databaseName, String tableName) {
    String partitionColumnsQuery = "SELECT DISTINCT PARTITION_EXPRESSION FROM INFORMATION_SCHEMA.PARTITIONS "
            + "WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND PARTITION_NAME IS NOT NULL "
            + "AND ( PARTITION_METHOD = 'RANGE' or PARTITION_METHOD = 'RANGE COLUMNS') "
            + "AND PARTITION_EXPRESSION IS NOT NULL";
    try (PreparedStatement ps = connection.prepareStatement(partitionColumnsQuery)) {
        ps.setString(1, databaseName);
        ps.setString(2, tableName);
        ResultSet rs = ps.executeQuery();
        ImmutableList.Builder<String> list = ImmutableList.builder();
        if (null != rs) {
            while (rs.next()) {
                String partitionColumn = rs.getString("PARTITION_EXPRESSION").replace("`", "");
                list.add(partitionColumn);
            }
            return list.build();
        } else {
            return Lists.newArrayList();
        }
    } catch (SQLException | NullPointerException e) {
        throw new StarRocksConnectorException(e.getMessage(), e);
    }
}
@Test
public void testListPartitionColumns() {
    try {
        JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource);
        Integer size = jdbcMetadata.listPartitionColumns("test", "tbl1",
                Arrays.asList(new Column("d", Type.VARCHAR))).size();
        Assert.assertTrue(size > 0);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        Assert.fail();
    }
}
@Override
public String version() {
    return VERSION;
}
@Test
public void shouldReturnOneDotZeroForVersion() {
    assertThat(new JsonBasedTaskExtensionHandler_V1().version(), is("1.0"));
}
public abstract byte[] encode(MutableSpan input);
@Test
void span_JSON_V2() {
    assertThat(new String(encoder.encode(clientSpan), UTF_8))
        .isEqualTo(
            "{\"traceId\":\"7180c278b62e8f6a216a2aea45d08fc9\",\"parentId\":\"6b221d5bc9e6496c\",\"id\":\"5b4185666d50f68b\",\"kind\":\"CLIENT\",\"name\":\"get\",\"timestamp\":1472470996199000,\"duration\":207000,\"localEndpoint\":{\"serviceName\":\"frontend\",\"ipv4\":\"127.0.0.1\"},\"remoteEndpoint\":{\"serviceName\":\"backend\",\"ipv4\":\"192.168.99.101\",\"port\":9000},\"annotations\":[{\"timestamp\":1472470996238000,\"value\":\"foo\"},{\"timestamp\":1472470996403000,\"value\":\"bar\"}],\"tags\":{\"clnt/finagle.version\":\"6.45.0\",\"http.path\":\"/api\"}}");
}
@Override
public void onDraw(Canvas canvas) {
    final boolean keyboardChanged = mKeyboardChanged;
    super.onDraw(canvas);
    // switching animation
    if (mAnimationLevel != AnimationsLevel.None && keyboardChanged && (mInAnimation != null)) {
        startAnimation(mInAnimation);
        mInAnimation = null;
    }

    if (mGestureTypingPathShouldBeDrawn) {
        mGestureDrawingHelper.draw(canvas);
    }

    // showing any requested watermark
    float watermarkX = mWatermarkEdgeX;
    final float watermarkY = getHeight() - mWatermarkDimen - mWatermarkMargin;
    for (Drawable watermark : mWatermarks) {
        watermarkX -= (mWatermarkDimen + mWatermarkMargin);
        canvas.translate(watermarkX, watermarkY);
        watermark.draw(canvas);
        canvas.translate(-watermarkX, -watermarkY);
    }
}
@Test
public void testDoesNotAddExtraDrawIfRtlWorkaround() {
    SharedPrefsHelper.setPrefsValue(R.string.settings_key_workaround_disable_rtl_fix, false);
    ExtraDraw mockDraw1 = Mockito.mock(ExtraDraw.class);
    Mockito.doReturn(true).when(mockDraw1).onDraw(any(), any(), same(mViewUnderTest));

    Robolectric.getForegroundThreadScheduler().pause();
    Assert.assertFalse(Robolectric.getForegroundThreadScheduler().areAnyRunnable());

    mViewUnderTest.addExtraDraw(mockDraw1);

    Mockito.verify(mockDraw1, Mockito.never()).onDraw(any(), any(), any());
    Assert.assertEquals(0, Robolectric.getForegroundThreadScheduler().size());
}
@VisibleForTesting
String importSingleAlbum(UUID jobId, TokensAndUrlAuthData authData, MediaAlbum inputAlbum)
        throws IOException, InvalidTokenException, PermissionDeniedException, UploadErrorException {
    // Set up album
    GoogleAlbum googleAlbum = new GoogleAlbum();
    googleAlbum.setTitle(GooglePhotosImportUtils.cleanAlbumTitle(inputAlbum.getName()));

    GoogleAlbum responseAlbum = getOrCreatePhotosInterface(jobId, authData).createAlbum(googleAlbum);
    return responseAlbum.getId();
}
@Test
public void importAlbumWithITString()
        throws PermissionDeniedException, InvalidTokenException, IOException, UploadErrorException {
    String albumId = "Album Id";
    String albumName = "Album Name";
    String albumDescription = "Album Description";
    MediaAlbum albumModel = new MediaAlbum(albumId, albumName, albumDescription);

    PortabilityJob portabilityJob = mock(PortabilityJob.class);
    Mockito.when(portabilityJob.userLocale()).thenReturn("it");
    JobStore jobStore = mock(JobStore.class);
    Mockito.when(jobStore.findJob(uuid)).thenReturn(portabilityJob);
    GoogleAlbum responseAlbum = new GoogleAlbum();
    responseAlbum.setId(NEW_ALBUM_ID);
    Mockito.when(googlePhotosInterface.createAlbum(any(GoogleAlbum.class))).thenReturn(responseAlbum);
    PhotosLibraryClient photosLibraryClient = mock(PhotosLibraryClient.class);

    GoogleMediaImporter sut =
            new GoogleMediaImporter(
                    null, /* credentialFactory */
                    jobStore,
                    null, /* jsonFactory */
                    new HashMap<>(), /* photosInterfacesMap */
                    new HashMap<>(), /* photosLibraryClientMap */
                    appCredentials,
                    googlePhotosInterface,
                    connectionProvider,
                    monitor,
                    1.0 /* writesPerSecond */);

    sut.importSingleAlbum(uuid, null, albumModel);

    ArgumentCaptor<GoogleAlbum> albumArgumentCaptor = ArgumentCaptor.forClass(GoogleAlbum.class);
    Mockito.verify(googlePhotosInterface).createAlbum(albumArgumentCaptor.capture());
    assertEquals(albumArgumentCaptor.getValue().getTitle(), albumName);
}
static JobVertexInputInfo computeVertexInputInfoForPointwise(
        int sourceCount,
        int targetCount,
        Function<Integer, Integer> numOfSubpartitionsRetriever,
        boolean isDynamicGraph) {

    final List<ExecutionVertexInputInfo> executionVertexInputInfos = new ArrayList<>();

    if (sourceCount >= targetCount) {
        for (int index = 0; index < targetCount; index++) {
            int start = index * sourceCount / targetCount;
            int end = (index + 1) * sourceCount / targetCount;

            IndexRange partitionRange = new IndexRange(start, end - 1);
            IndexRange subpartitionRange =
                    computeConsumedSubpartitionRange(
                            index,
                            1,
                            () -> numOfSubpartitionsRetriever.apply(start),
                            isDynamicGraph,
                            false);
            executionVertexInputInfos.add(
                    new ExecutionVertexInputInfo(index, partitionRange, subpartitionRange));
        }
    } else {
        for (int partitionNum = 0; partitionNum < sourceCount; partitionNum++) {
            int start = (partitionNum * targetCount + sourceCount - 1) / sourceCount;
            int end = ((partitionNum + 1) * targetCount + sourceCount - 1) / sourceCount;
            int numConsumers = end - start;

            IndexRange partitionRange = new IndexRange(partitionNum, partitionNum);
            // Variable used in lambda expression should be final or effectively final
            final int finalPartitionNum = partitionNum;
            for (int i = start; i < end; i++) {
                IndexRange subpartitionRange =
                        computeConsumedSubpartitionRange(
                                i,
                                numConsumers,
                                () -> numOfSubpartitionsRetriever.apply(finalPartitionNum),
                                isDynamicGraph,
                                false);
                executionVertexInputInfos.add(
                        new ExecutionVertexInputInfo(i, partitionRange, subpartitionRange));
            }
        }
    }

    return new JobVertexInputInfo(executionVertexInputInfos);
}
@Test
void testComputeVertexInputInfoForPointwiseWithDynamicGraph() {
    final JobVertexInputInfo jobVertexInputInfo =
            computeVertexInputInfoForPointwise(2, 3, ignored -> 4, true);
    assertThat(jobVertexInputInfo.getExecutionVertexInputInfos())
            .containsExactlyInAnyOrder(
                    new ExecutionVertexInputInfo(0, new IndexRange(0, 0), new IndexRange(0, 1)),
                    new ExecutionVertexInputInfo(1, new IndexRange(0, 0), new IndexRange(2, 3)),
                    new ExecutionVertexInputInfo(2, new IndexRange(1, 1), new IndexRange(0, 3)));
}
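The expected ranges in the test follow directly from the ceiling-division arithmetic of the sourceCount < targetCount branch, reproduced standalone here for 2 producers and 3 consumers:

int sourceCount = 2, targetCount = 3;
for (int p = 0; p < sourceCount; p++) {
    int start = (p * targetCount + sourceCount - 1) / sourceCount;
    int end = ((p + 1) * targetCount + sourceCount - 1) / sourceCount;
    System.out.println("producer " + p + " -> consumers [" + start + ", " + end + ")");
}
// producer 0 -> consumers [0, 2)   (its 4 subpartitions split as [0,1] and [2,3])
// producer 1 -> consumers [2, 3)   (consumer 2 reads all subpartitions [0,3])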
public static Object getProperty(Object source, String key) {
    if (source instanceof Map) {
        return ((Map<?, ?>) source).get(key);
    }
    SingleValueMap<Object, Object> map = new SingleValueMap<>();
    copy(source, map, include(key));
    return map.getValue();
}
@Test
public void testGetProperty() {
    Assert.assertEquals(1, FastBeanCopier.getProperty(ImmutableMap.of("a", 1, "b", 2), "a"));
}
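A usage sketch of the two paths in getProperty: map inputs short-circuit to Map.get, while any other bean goes through a single-property copy:

Object direct = FastBeanCopier.getProperty(ImmutableMap.of("a", 1, "b", 2), "b"); // 2, via Map.get
// For a non-map source, the copy-based path is taken (MyBean is a hypothetical POJO with a "name" property):
Object viaCopy = FastBeanCopier.getProperty(new MyBean(), "name");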
public void applyClientConfiguration(String account, DataLakeFileSystemClientBuilder builder) {
    String sasToken = adlsSasTokens.get(account);
    if (sasToken != null && !sasToken.isEmpty()) {
        builder.sasToken(sasToken);
    } else if (namedKeyCreds != null) {
        builder.credential(
                new StorageSharedKeyCredential(namedKeyCreds.getKey(), namedKeyCreds.getValue()));
    } else {
        builder.credential(new DefaultAzureCredentialBuilder().build());
    }

    // apply connection string last so its parameters take precedence, e.g. SAS token
    String connectionString = adlsConnectionStrings.get(account);
    if (connectionString != null && !connectionString.isEmpty()) {
        builder.endpoint(connectionString);
    } else {
        builder.endpoint("https://" + account);
    }
}
@Test
public void testNoSasToken() {
    AzureProperties props = new AzureProperties();
    DataLakeFileSystemClientBuilder clientBuilder = mock(DataLakeFileSystemClientBuilder.class);
    props.applyClientConfiguration("account", clientBuilder);

    verify(clientBuilder, times(0)).sasToken(any());
    verify(clientBuilder).credential(any(TokenCredential.class));
    verify(clientBuilder, never()).credential(any(StorageSharedKeyCredential.class));
}