index
int64 0
0
| repo_id
stringlengths 26
205
| file_path
stringlengths 51
246
| content
stringlengths 8
433k
| __index_level_0__
int64 0
10k
|
---|---|---|---|---|
0 |
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api
|
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowClientMemoryConfig.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
/**
* Defines various aspects of data access guarantees and update behavior which impact the heap footprint/GC behavior of hollow.
*
* Implementations are often a {@link SpecifiedConfig}.
*
* @deprecated Implement the {@link HollowConsumer.ObjectLongevityConfig} and/or {@link HollowConsumer.DoubleSnapshotConfig} for use
* with the {@link HollowConsumer} instead.
*
*/
@Deprecated // matches the Javadoc @deprecated tag above; enables compiler deprecation warnings for users
public interface HollowClientMemoryConfig extends HollowConsumer.ObjectLongevityConfig {

    /** One hour, in milliseconds; used as the default grace and usage-detection period. */
    long ONE_HOUR = 60 * 60 * 1000;

    /** Default configuration: object longevity off, no automatic data drop, one-hour periods. */
    HollowClientMemoryConfig DEFAULT_CONFIG = new SpecifiedConfig(false, false, ONE_HOUR, ONE_HOUR);

    /**
     * @return whether or not a double snapshot will ever be attempted by the {@link HollowClient}
     */
    boolean allowDoubleSnapshot();

    /**
     * Simple immutable value-holder implementation of {@link HollowClientMemoryConfig}.
     * Double snapshots are always allowed; expired-usage stack traces and forced data
     * drops are always disabled.
     */
    class SpecifiedConfig implements HollowClientMemoryConfig {

        private final boolean enableLongLivedObjectSupport;
        private final boolean dropDataAutomatically;
        private final long gracePeriodMillis;
        private final long usageDetectionPeriodMillis;

        /**
         * @param enableLongLivedObjectSupport whether object longevity is enabled
         * @param dropDataAutomatically whether stale data is dropped automatically
         * @param gracePeriodMillis how long stale object access remains safe, in millis
         * @param usageDetectionPeriodMillis how long to monitor stale-object usage, in millis
         */
        public SpecifiedConfig(boolean enableLongLivedObjectSupport, boolean dropDataAutomatically,
                long gracePeriodMillis, long usageDetectionPeriodMillis) {
            this.enableLongLivedObjectSupport = enableLongLivedObjectSupport;
            this.dropDataAutomatically = dropDataAutomatically;
            this.gracePeriodMillis = gracePeriodMillis;
            this.usageDetectionPeriodMillis = usageDetectionPeriodMillis;
        }

        public boolean enableLongLivedObjectSupport() { return enableLongLivedObjectSupport; }

        public boolean dropDataAutomatically() { return dropDataAutomatically; }

        public long gracePeriodMillis() { return gracePeriodMillis; }

        public long usageDetectionPeriodMillis() { return usageDetectionPeriodMillis; }

        public boolean enableExpiredUsageStackTraces() { return false; }

        public boolean forceDropData() { return false; }

        public boolean allowDoubleSnapshot() { return true; }
    }
}
| 9,400 |
0 |
Create_ds/hollow/hollow-ui-tools/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/test/java/com/netflix/hollow/ui/HollowDiffUtilTest.java
|
package com.netflix.hollow.ui;
import static com.netflix.hollow.ui.HollowDiffUtil.formatBytes;
import org.junit.Assert;
import org.junit.Test;
public class HollowDiffUtilTest {

    @Test
    public void testFormatBytes() {
        // Whole-unit values for every binary magnitude.
        assertScaled(1, "B", -10, 2, 0, 2, 10);
        assertScaled(Math.pow(2, 10), "KiB", -100, 50, 30, 100);
        assertScaled(Math.pow(2, 20), "MiB", -100, 50, 30, 100);
        assertScaled(Math.pow(2, 30), "GiB", -10, 30, 30, 100);
        assertScaled(Math.pow(2, 40), "TiB", -100, 50, 30, 100);
        assertScaled(Math.pow(2, 50), "PiB", -100, 50, 30, 100);

        // Boundary values around unit transitions and the extremes of long.
        Assert.assertEquals( "-1,023 B", formatBytes(-1023));
        Assert.assertEquals( "-1 KiB", formatBytes(-1024));
        Assert.assertEquals( "1,000 TiB", formatBytes(1000 * (long)Math.pow(2, 40)));
        Assert.assertEquals( "1 PiB", formatBytes(1024 * (long)Math.pow(2, 40)));
        Assert.assertEquals( "8 EiB", formatBytes(Long.MAX_VALUE));

        // Validate Decimal
        Assert.assertEquals( "95.37 MiB", formatBytes(100000000));
        Assert.assertEquals( "-9.54 MiB", formatBytes(-10000000));
        Assert.assertEquals( "1.95 KiB", formatBytes(2001));
        Assert.assertEquals( "19.53 KiB", formatBytes(20000));
        Assert.assertEquals( "186.26 GiB", formatBytes(200000000000L));
    }

    // Asserts that each count, scaled by the given multiple, formats to "<count> <unit>".
    private void assertScaled(double multiple, String unit, long... counts) {
        for (int i = 0; i < counts.length; i++) {
            long count = counts[i];
            Assert.assertEquals(count + " " + unit, formatBytes(count * (long) multiple));
        }
    }
}
| 9,401 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/UIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
/**
 * Minimal lifecycle contract for the embedded web servers that host hollow's UI tools.
 */
public interface UIServer {
    /** Starts the server; after this returns the server is accepting requests. */
    void start() throws Exception;
    /** Stops the server and releases its resources. */
    void stop() throws Exception;
    /** Blocks the calling thread until the server has shut down. */
    void join() throws InterruptedException;
}
| 9,402 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HollowDiffUtil.java
|
package com.netflix.hollow.ui;
import java.text.DecimalFormat;
public class HollowDiffUtil {

    /** Binary (1024-based) unit suffixes, indexed by power of 1024. */
    private static final String[] HEAP_SIZE_UNITS = new String[] { "B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"};

    /**
     * Formats a byte count as a human-readable binary-unit string, e.g. 2001 -> "1.95 KiB".
     * Negative values keep their sign; 0 formats as "0 B". At most two fractional digits
     * are shown, with thousands grouping on the integer part.
     *
     * @param sizeInBytes byte count, may be negative (including Long.MIN_VALUE)
     * @return formatted size, e.g. "95.37 MiB"
     */
    public static String formatBytes(long sizeInBytes) {
        if (sizeInBytes == 0) return "0 B";
        String sign = (sizeInBytes < 0) ? "-" : "";
        // Take the magnitude as a double: Math.abs(Long.MIN_VALUE) overflows and stays
        // negative, which previously made Math.log10 return NaN for that input.
        double magnitude = Math.abs((double) sizeInBytes);
        // Index of the largest power of 1024 not exceeding the magnitude (0..6).
        int digitGroups = (int) (Math.log10(magnitude) / Math.log10(1024));
        DecimalFormat formatter = new DecimalFormat("#,##0.##");
        return sign + formatter.format(magnitude / Math.pow(1024, digitGroups)) + " " + HEAP_SIZE_UNITS[digitGroups];
    }
}
| 9,403 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HttpHandlerWithServletSupport.java
|
/*
* Copyright 2016-2023 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Most of the code in this class can be attributed to Frode Carlsen, here is the original licensing:
*
* Copyright (c) 2013 Frode Carlsen.
* All rights reserved.
*
* Redistribution and use in source and binary forms are permitted
* provided that the above copyright notice and this paragraph are
* duplicated in all such forms and that any documentation,
* advertising materials, and other materials related to such
* distribution and use acknowledge that the software was developed
* by the <organization>. The name of the
* <organization> may not be used to endorse or promote products derived
* from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
package com.netflix.hollow.ui;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.servlet.ReadListener;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import javax.servlet.http.HttpUtils;
/**
 * Bridges the JDK's built-in {@code com.sun.net.httpserver} server to the javax.servlet
 * API, so an {@link HttpServlet} can be hosted without a full servlet container. Each
 * exchange is wrapped in minimal request/response adapters and dispatched to the
 * servlet's service() method.
 */
public class HttpHandlerWithServletSupport implements HttpHandler {

    // Servlet that services every exchange routed to this handler.
    private HttpServlet servlet;

    /**
     * Read-side adapter: exposes an HttpExchange through the HttpServletRequest
     * interface. Only the methods needed here are implemented; any other call falls
     * through to the proxy built by createUnimplementAdapter and throws
     * UnsupportedOperationException.
     */
    private final class RequestWrapper extends HttpServletRequestWrapper {
        private final HttpExchange ex;
        // Merged query-string and form-POST parameters, parsed once in handle().
        private final Map<String, String[]> postData;
        // In-memory, replayable copy of the request body.
        private final ServletInputStream is;
        // Per-request attribute storage (HttpExchange has no equivalent).
        private final Map<String, Object> attributes = new HashMap<>();

        private RequestWrapper(HttpServletRequest request, HttpExchange ex, Map<String, String[]> postData, ServletInputStream is) {
            super(request);
            this.ex = ex;
            this.postData = postData;
            this.is = is;
        }

        // Parses raw "Cookie" header(s) into Cookie objects. Returns null when the
        // request carries no cookies (per the servlet API contract). Tokens that are
        // not exactly name=value are silently skipped.
        @Override
        public Cookie[] getCookies() {
            Headers headers = ex.getRequestHeaders();
            if (headers != null) {
                List<String> strCookies = headers.get("Cookie");
                if (strCookies != null) {
                    List<Cookie> cookies = new ArrayList<>();
                    for (String cookieString : strCookies) {
                        String[] tokens = cookieString.split("\\s*;\\s*");
                        for (String token : tokens) {
                            String[] keyVal = token.split("\\s*=\\s*");
                            if(keyVal.length == 2){
                                cookies.add(new Cookie(keyVal[0], keyVal[1]));
                            }
                        }
                    }
                    return cookies.toArray(new Cookie[0]);
                }
            }
            return null;
        }

        @Override
        public String getHeader(String name) {
            return ex.getRequestHeaders().getFirst(name);
        }

        // NOTE(review): Headers.get returns null for an absent header, so this NPEs if
        // the named header is missing — presumably only queried for headers known to
        // exist; confirm against callers.
        @Override
        public Enumeration<String> getHeaders(String name) {
            return new Vector<String>(ex.getRequestHeaders().get(name)).elements();
        }

        @Override
        public Enumeration<String> getHeaderNames() {
            return new Vector<String>(ex.getRequestHeaders().keySet()).elements();
        }

        @Override
        public Object getAttribute(String name) {
            return attributes.get(name);
        }

        @Override
        public void setAttribute(String name, Object o) {
            this.attributes.put(name, o);
        }

        @Override
        public Enumeration<String> getAttributeNames() {
            return new Vector<String>(attributes.keySet()).elements();
        }

        @Override
        public String getMethod() {
            return ex.getRequestMethod();
        }

        // Returns the buffered copy of the body, so it can be read after handle()
        // already consumed the exchange's original stream.
        @Override
        public ServletInputStream getInputStream() throws IOException {
            return is;
        }

        // NOTE(review): uses the platform default charset, not the request's declared
        // charset — acceptable for the UI's ASCII-only usage, but worth confirming.
        @Override
        public BufferedReader getReader() throws IOException {
            return new BufferedReader(new InputStreamReader(getInputStream()));
        }

        @Override
        public String getPathInfo() {
            return ex.getRequestURI().getPath();
        }

        // Single-value accessor; a multi-valued parameter is flattened via
        // Arrays.toString (e.g. "[a, b]"), which diverges from the servlet spec's
        // "first value" behavior but is what the UI relies on today.
        @Override
        public String getParameter(String name) {
            String[] arr = postData.get(name);
            return arr != null ? (arr.length > 1 ? Arrays.toString(arr) : arr[0]) : null;
        }

        @Override
        public Map<String, String[]> getParameterMap() {
            return postData;
        }

        @Override
        public Enumeration<String> getParameterNames() {
            return new Vector<String>(postData.keySet()).elements();
        }
    }

    /**
     * Write-side adapter: buffers the entire response body in memory so the
     * Content-Length can be computed, then flushes headers and body to the exchange
     * in complete().
     */
    private final class ResponseWrapper extends HttpServletResponseWrapper {
        // Accumulates the full response body until complete() is called.
        final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        final ServletOutputStream servletOutputStream = new ServletOutputStream() {
            @Override
            public void write(int b) throws IOException {
                outputStream.write(b);
            }
            @Override
            public boolean isReady() {
                return true;
            }
            @Override
            public void setWriteListener(WriteListener writeListener) {
            }
        };
        private final HttpExchange ex;
        private final PrintWriter printWriter;
        // Response status; sent to the exchange only in complete().
        private int status = HttpServletResponse.SC_OK;

        private ResponseWrapper(HttpServletResponse response, HttpExchange ex) {
            super(response);
            this.ex = ex;
            printWriter = new PrintWriter(servletOutputStream);
        }

        @Override
        public void setContentType(String type) {
            ex.getResponseHeaders().add("Content-Type", type);
        }

        @Override
        public void setHeader(String name, String value) {
            ex.getResponseHeaders().add(name, value);
        }

        @Override
        public javax.servlet.ServletOutputStream getOutputStream() throws IOException {
            return servletOutputStream;
        }

        // NOTE(review): this header is advisory only — complete() passes the actual
        // buffered size to sendResponseHeaders, which governs the response length.
        @Override
        public void setContentLength(int len) {
            ex.getResponseHeaders().add("Content-Length", len + "");
        }

        @Override
        public void setStatus(int status) {
            this.status = status;
        }

        // Records the error status and writes the message (if any) as the body text.
        @Override
        public void sendError(int sc, String msg) throws IOException {
            this.status = sc;
            if (msg != null) {
                printWriter.write(msg);
            }
        }

        @Override
        public void sendError(int sc) throws IOException {
            sendError(sc, null);
        }

        @Override
        public PrintWriter getWriter() throws IOException {
            return printWriter;
        }

        // Emits only "name=value"; cookie attributes (path, max-age, etc.) are dropped.
        @Override
        public void addCookie(Cookie c) {
            Headers header = ex.getResponseHeaders();
            header.add("Set-Cookie", c.getName() + "=" + c.getValue());
        }

        // Flushes the buffered body to the exchange: sends status + computed length,
        // writes the bytes, and always closes the exchange (even on failure).
        public void complete() throws IOException {
            try {
                printWriter.flush();
                ex.sendResponseHeaders(status, outputStream.size());
                if (outputStream.size() > 0) {
                    ex.getResponseBody().write(outputStream.toByteArray());
                }
                ex.getResponseBody().flush();
            } catch (Exception e) {
                throw new IOException(e);
            } finally {
                ex.close();
            }
        }
    }

    /**
     * @param servlet the servlet that will service all exchanges routed to this handler
     */
    public HttpHandlerWithServletSupport(HttpServlet servlet) {
        this.servlet = servlet;
    }

    // Deprecation suppressed for javax.servlet.http.HttpUtils, used below to parse
    // query strings and form POST data.
    @SuppressWarnings("deprecation")
    @Override
    public void handle(final HttpExchange ex) throws IOException {
        // Read the entire request body up front so it can be parsed for form data and
        // still be replayed through getInputStream()/getReader().
        byte[] inBytes = getBytes(ex.getRequestBody());
        ex.getRequestBody().close();
        final ByteArrayInputStream newInput = new ByteArrayInputStream(inBytes);
        final ServletInputStream is = new ServletInputStream() {
            @Override
            public int read() throws IOException {
                return newInput.read();
            }
            @Override
            public boolean isFinished() {
                return true;
            }
            @Override
            public boolean isReady() {
                return true;
            }
            @Override
            public void setReadListener(ReadListener readListener) {
            }
        };
        // Merge query-string parameters and (when present) form-encoded POST data.
        // HttpUtils throws IllegalArgumentException on a null/unparseable source, which
        // is treated as "no parameters of that kind".
        Map<String, String[]> parsePostData = new HashMap<>();
        try {
            parsePostData.putAll(HttpUtils.parseQueryString(ex.getRequestURI().getQuery()));
            // check if any postdata to parse
            parsePostData.putAll(HttpUtils.parsePostData(inBytes.length, is));
        } catch (IllegalArgumentException e) {
            // no postData - just reset inputstream
            newInput.reset();
        }
        final Map<String, String[]> postData = parsePostData;
        // Wrap the exchange and dispatch to the servlet; unimplemented servlet-API
        // methods on the underlying delegates throw UnsupportedOperationException.
        RequestWrapper req = new RequestWrapper(createUnimplementAdapter(HttpServletRequest.class), ex, postData, is);
        ResponseWrapper resp = new ResponseWrapper(createUnimplementAdapter(HttpServletResponse.class), ex);
        try {
            servlet.service(req, resp);
            resp.complete();
        } catch (ServletException e) {
            throw new IOException(e);
        }
    }

    // Drains an InputStream fully into a byte array (does not close the stream).
    private static byte[] getBytes(InputStream in) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        while (true) {
            int r = in.read(buffer);
            if (r == -1)
                break;
            out.write(buffer, 0, r);
        }
        return out.toByteArray();
    }

    // Builds a dynamic proxy for the given servlet interface whose every method throws
    // UnsupportedOperationException — the wrappers above override only what is needed.
    @SuppressWarnings("unchecked")
    private static <T> T createUnimplementAdapter(Class<T> httpServletApi) {
        class UnimplementedHandler implements InvocationHandler {
            @Override
            public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                throw new UnsupportedOperationException("Not implemented: " + method + ", args=" + Arrays.toString(args));
            }
        }
        return (T) Proxy.newProxyInstance(UnimplementedHandler.class.getClassLoader(),
                new Class<?>[] { httpServletApi },
                new UnimplementedHandler());
    }
}
| 9,404 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HtmlEscapingWriter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
import java.io.IOException;
import java.io.Writer;
import org.apache.commons.text.StringEscapeUtils;
/**
 * A {@link Writer} decorator that HTML4-escapes all character data written through it
 * before delegating to the wrapped writer.
 */
public class HtmlEscapingWriter extends Writer {

    private final Writer delegate;

    /**
     * @param writer the underlying writer that receives the escaped output
     */
    public HtmlEscapingWriter(Writer writer) {
        this.delegate = writer;
    }

    /** Escapes the given character range as HTML4 and forwards it to the delegate. */
    @Override
    public void write(char[] cbuf, int off, int len) throws IOException {
        String raw = new String(cbuf, off, len);
        String escaped = StringEscapeUtils.escapeHtml4(raw);
        delegate.write(escaped);
    }

    @Override
    public void flush() throws IOException {
        delegate.flush();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }
}
| 9,405 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/EscapingTool.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import org.apache.commons.text.StringEscapeUtils;
/**
 * Velocity-template helper exposing HTML and URL escaping of arbitrary values.
 */
public class EscapingTool {

    /**
     * HTML4-escapes the string form of the given value.
     *
     * @param string value to escape; may be null
     * @return the escaped string, or null when the input is null
     */
    public String html(Object string) {
        if (string == null) {
            return null;
        }
        return StringEscapeUtils.escapeHtml4(String.valueOf(string));
    }

    /**
     * URL-encodes the string form of the given value using UTF-8.
     *
     * @param string value to encode; may be null
     * @return the encoded string, or null when the input is null
     */
    public String url(Object string) {
        if (string == null) {
            return null;
        }
        try {
            return URLEncoder.encode(String.valueOf(string), StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException ex) {
            // UTF-8 is guaranteed by the JVM spec, so this path is unreachable in practice.
            return null;
        }
    }
}
| 9,406 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HollowUIRouter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;
import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
/**
 * Base servlet for hollow's UI tools: normalizes the base URL path, hosts a Velocity
 * engine for templates, routes GET requests to the subclass's {@link #handle} method,
 * and provides path-parsing and static-resource helpers.
 */
public abstract class HollowUIRouter extends HttpServlet {

    protected final String baseUrlPath;
    protected final VelocityEngine velocityEngine;

    /**
     * Routes GET requests to {@link #handle}; any exception is rendered as a 500
     * response whose message contains the stack trace.
     */
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
        try {
            handle(request.getPathInfo(), request, response);
        } catch (Exception ex) {
            StringWriter stringWriter = new StringWriter();
            PrintWriter printWriter = new PrintWriter(stringWriter);
            ex.printStackTrace(printWriter);
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, stringWriter.toString());
        }
    }

    /**
     * Handles a request for the given target path.
     *
     * @param target the request path (defaults to HttpServletRequest::getPathInfo())
     * @return whether the request was handled
     */
    public abstract boolean handle(String target, HttpServletRequest req, HttpServletResponse resp) throws IOException;

    /**
     * @param baseUrlPath path prefix under which this UI is hosted; normalized to have
     *                    a leading slash and no trailing slash
     */
    public HollowUIRouter(String baseUrlPath) {
        if(!baseUrlPath.startsWith("/"))
            baseUrlPath = "/" + baseUrlPath;
        if(baseUrlPath.endsWith("/"))
            baseUrlPath = baseUrlPath.substring(0, baseUrlPath.length() - 1);
        this.baseUrlPath = baseUrlPath;
        this.velocityEngine = initVelocity();
    }

    public VelocityEngine getVelocityEngine() {
        return velocityEngine;
    }

    public String getBaseURLPath() {
        return baseUrlPath;
    }

    /**
     * Extracts the first path segment after the base URL path, e.g. for base "/diff"
     * and target "/diff/topn/details" returns "topn".
     *
     * @throws IllegalStateException if target is null
     */
    protected String getTargetRootPath(String target) {
        // The null check must come first: previously target.length() was dereferenced
        // before the null check, so a null target always threw NPE instead of the
        // intended IllegalStateException.
        if (target == null) {
            throw new IllegalStateException("target is null. It defaults to HttpServletRequest::getPathInfo() but can be " +
                    "customized by invoking handle method on HollowExplorerUI HollowDiffUI et al classes.");
        }
        int baseLength = baseUrlPath.length() + 1;
        if(target.length() < baseLength)
            return "";
        int secondSlashIndex = target.indexOf('/', baseLength);
        if(secondSlashIndex == -1)
            return target.substring(baseLength);
        return target.substring(baseLength, secondSlashIndex);
    }

    /**
     * Extracts the resource name (everything after the second slash past the base
     * path), with special handling for a diff hosted at the empty path "".
     */
    protected String getResourceName(String target, String diffUIKey) {
        if (diffUIKey == null || diffUIKey.length() == 0) { // for diff at path ""
            int baseLength = baseUrlPath.length() + 1;
            int secondSlashIndex = target.indexOf('/', baseLength);
            if(secondSlashIndex == -1) {
                // a diff hosted at path ""
                secondSlashIndex = target.indexOf('/');
                if (secondSlashIndex == -1) {
                    return "";
                }
            }
            return target.substring(secondSlashIndex + 1);
        } else {
            return getResourceName(target);
        }
    }

    /** Extracts the resource name following the base path, or "" when there is none. */
    protected String getResourceName(String target) {
        int baseLength = baseUrlPath.length() + 1;
        int secondSlashIndex = target.indexOf('/', baseLength);
        if(secondSlashIndex == -1) {
            return "";
        }
        return target.substring(secondSlashIndex + 1);
    }

    /**
     * Serves a classpath resource (css/js/png get an appropriate content type) to the
     * response, best-effort.
     *
     * @return true if the resource was found and copied, false otherwise
     */
    protected boolean serveResource(HttpServletRequest req, HttpServletResponse resp, String resourceName) {
        try {
            if(resourceName.endsWith(".css")) {
                resp.setContentType("text/css");
            } else if(resourceName.endsWith(".js")) {
                resp.setContentType("text/javascript");
            } else if(resourceName.endsWith(".png")) {
                resp.setContentType("image/png");
            }
            // try-with-resources: the stream was previously never closed (resource
            // leak); also guard against a missing resource, where getResourceAsStream
            // returns null.
            try (InputStream is = this.getClass().getResourceAsStream("/" + resourceName)) {
                if (is == null) {
                    return false;
                }
                IOUtils.copy(is, resp.getOutputStream());
            }
            return true;
        } catch(Exception e){
            // Best-effort: any failure (IO error, bad name) just reports "not served".
            return false;
        }
    }

    /** Builds a Velocity engine that loads templates from the classpath. */
    protected VelocityEngine initVelocity() {
        VelocityEngine ve = new VelocityEngine();
        ve.setProperty(RuntimeConstants.RESOURCE_LOADER, "classpath");
        ve.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName());
        ve.setProperty("runtime.log.logsystem.class", "org.apache.velocity.runtime.log.SimpleLog4JLogSystem");
        ve.setProperty("runtime.log.logsystem.log4j.category", "velocity");
        ve.setProperty("runtime.log.logsystem.log4j.logger", "velocity");
        ve.init();
        return ve;
    }
}
| 9,407 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HollowUISession.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
import static com.netflix.hollow.core.util.Threads.daemonThread;
import static java.util.concurrent.TimeUnit.MINUTES;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Cookie-keyed, in-memory session store for the hollow UI tools. Sessions are
 * identified by a random long stored in the "hollowUISessionId" cookie and are
 * reaped by a daemon thread after one hour without access.
 */
public class HollowUISession {

    // Sessions idle longer than this (one hour) are discarded by the cleanup thread.
    private static final long SESSION_ABANDONMENT_MILLIS = 60 * 60 * 1000;
    private static final String HTTP_ONLY_COMMENT = "__HTTP_ONLY__";
    // Shared id generator: Random is thread-safe, and reusing one instance avoids
    // allocating and re-seeding a new Random on every cookie-less request.
    private static final Random SESSION_ID_SOURCE = new Random();

    private final Map<String, Object> sessionParams;
    private long lastAccessed;

    public HollowUISession() {
        this.sessionParams = new ConcurrentHashMap<String, Object>();
    }

    public void clearAttribute(String param) {
        sessionParams.remove(param);
    }

    public void setAttribute(String param, Object value) {
        sessionParams.put(param, value);
    }

    public Object getAttribute(String param) {
        return sessionParams.get(param);
    }

    /** Marks this session as just used, deferring its abandonment cutoff. */
    public void updateLastAccessed() {
        lastAccessed = System.currentTimeMillis();
    }

    private static final ConcurrentHashMap<Long, HollowUISession> sessions = new ConcurrentHashMap<Long, HollowUISession>();

    /**
     * Looks up the caller's session from the "hollowUISessionId" cookie, creating a
     * new id (and Set-Cookie on the response) and/or session as needed.
     */
    public static HollowUISession getSession(HttpServletRequest req, HttpServletResponse resp) {
        Long sessionId = null;
        if(req.getCookies() != null) {
            for(Cookie cookie : req.getCookies()) {
                if("hollowUISessionId".equals(cookie.getName())) {
                    sessionId = Long.valueOf(cookie.getValue());
                }
            }
        }
        if(sessionId == null) {
            // Mask off the sign bit so ids are always non-negative.
            sessionId = SESSION_ID_SOURCE.nextLong() & Long.MAX_VALUE;
            Cookie cookie = new Cookie("hollowUISessionId", sessionId.toString());
            cookie.setComment(HTTP_ONLY_COMMENT);
            resp.addCookie(cookie);
        }
        HollowUISession session = sessions.get(sessionId);
        if(session == null) {
            // putIfAbsent resolves the race between two first requests with the same id.
            session = new HollowUISession();
            HollowUISession existingSession = sessions.putIfAbsent(sessionId, session);
            if(existingSession != null)
                session = existingSession;
        }
        session.updateLastAccessed();
        return session;
    }

    static {
        daemonThread(HollowUISession::cleanupSessions, HollowUISession.class, "session-cleanup")
                .start();
    }

    // Reaps abandoned sessions once a minute until the thread is interrupted.
    private static void cleanupSessions() {
        while (!Thread.currentThread().isInterrupted()) {
            sessions.values()
                    .removeIf(s-> s.lastAccessed + SESSION_ABANDONMENT_MILLIS < System.currentTimeMillis());
            try {
                MINUTES.sleep(1);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so the loop condition observes it and the
                // cleanup thread terminates; previously the flag was swallowed, making
                // the loop uninterruptible.
                Thread.currentThread().interrupt();
            }
        }
    }
}
| 9,408 |
0 |
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-ui-tools/src/main/java/com/netflix/hollow/ui/HollowUIWebServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.ui;
import com.sun.net.httpserver.HttpServer;
import java.net.InetSocketAddress;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Hosts a hollow UI on the JDK's built-in {@code com.sun.net.httpserver.HttpServer},
 * dispatching all requests to a servlet-adapting handler. Lifecycle: start(), then
 * optionally join() to block until stop() completes.
 */
public class HollowUIWebServer {

    // Created lazily in start(); null until then.
    private HttpServer server;
    private final HttpHandlerWithServletSupport handler;
    private final int port;
    private JoinableExecutorService executor;

    /**
     * Extends {@code ThreadPoolExecutor} to allow waiting indefinitely for termination of underlying threadpool
     */
    private static class JoinableExecutorService extends ThreadPoolExecutor {
        // Released exactly once, when the pool reaches the TERMINATED state.
        private CountDownLatch countDownLatch;

        JoinableExecutorService() {
            // Cached-thread-pool configuration: unbounded thread count, 60s idle
            // keep-alive, direct handoff via SynchronousQueue.
            super(0, Integer.MAX_VALUE,
                60L, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());
            countDownLatch = new CountDownLatch(1);
        }

        // Hook invoked by ThreadPoolExecutor once the pool has fully terminated.
        @Override
        protected void terminated() {
            super.terminated();
            countDownLatch.countDown();
        }

        // Blocks until terminated() has run.
        void join() throws InterruptedException {
            countDownLatch.await();
        }
    }

    /**
     * @param handler servlet-adapting handler that will serve every request
     * @param port TCP port to bind in start()
     */
    public HollowUIWebServer(HttpHandlerWithServletSupport handler, int port) {
        this.port = port;
        this.handler = handler;
        this.executor = new JoinableExecutorService();
    }

    /** Binds the port and begins serving all paths ("/") via the handler. */
    public void start() throws Exception {
        server = HttpServer.create(new InetSocketAddress(port), 0);
        server.createContext("/", this.handler);
        server.setExecutor(executor);
        server.start();
    }

    /** Blocks the caller until the executor terminates (i.e. until stop() completes). */
    public void join() throws InterruptedException {
        executor.join();
    }

    /**
     * Shuts down the request executor (waiting up to 2x10s), then stops the server.
     *
     * NOTE(review): the executor is shut down before server.stop(0), so requests
     * arriving during the drain window may be rejected; also the InterruptedException
     * catch does not re-interrupt the current thread — confirm whether either is
     * intentional before changing.
     */
    public void stop() throws Exception {
        executor.shutdown();
        try {
            if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
                executor.shutdownNow();
                if (!executor.awaitTermination(10, TimeUnit.SECONDS))
                    System.err.println("Http Server ThreadPool did not terminate");
            }
        } catch (InterruptedException ie) {
            executor.shutdownNow();
        }
        server.stop(0);
    }
}
| 9,409 |
0 |
Create_ds/hollow/hollow-perf/src/main/java
|
Create_ds/hollow/hollow-perf/src/main/java/hollow/SegmentedLongArrayPlainPut.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hollow;
import com.netflix.hollow.core.memory.HollowUnsafeHandle;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import java.io.DataOutputStream;
import java.io.IOException;
import sun.misc.Unsafe;
// This is a copy of the class in hollow that replaces release stores with plain stores.
/**
* A segmented long array can grow without allocating successively larger blocks and copying memory.<p>
* <p>
* Segment length is always a power of two so that the location of a given index can be found with mask and shift operations.<p>
* <p>
* Conceptually this can be thought of as a single long array of undefined length. The currently allocated buffer will always be
* a multiple of the size of the segments. The buffer will grow automatically when a byte is written to an index greater than the
* currently allocated buffer.
*
* @author dkoszewnik
*/
@SuppressWarnings("restriction") // sun.misc.Unsafe
public class SegmentedLongArrayPlainPut {

    private static final Unsafe unsafe = HollowUnsafeHandle.getUnsafe();

    // Backing storage: each segment is a pooled long[] of length (1 << log2OfSegmentSize) + 1;
    // the extra trailing slot mirrors the first element of the next segment (see set()).
    protected final long[][] segments;
    protected final int log2OfSegmentSize;
    // Mask extracting the within-segment index from a global index.
    protected final int bitmask;

    /**
     * Allocates enough pooled segments to hold {@code numLongs} values.
     *
     * @param memoryRecycler pool supplying (and later reclaiming) the segment arrays
     * @param numLongs minimum number of longs the array must hold
     */
    public SegmentedLongArrayPlainPut(ArraySegmentRecycler memoryRecycler, long numLongs) {
        this.log2OfSegmentSize = memoryRecycler.getLog2OfLongSegmentSize();
        int numSegments = (int) ((numLongs - 1) >>> log2OfSegmentSize) + 1;
        long[][] segments = new long[numSegments][];
        this.bitmask = (1 << log2OfSegmentSize) - 1;
        for (int i = 0; i < segments.length; i++) {
            segments[i] = memoryRecycler.getLongArray();
        }
        /// The following assignment is purposefully placed *after* the population of all segments.
        /// The final assignment after the initialization of the array guarantees that no thread
        /// will see any of the array elements before assignment.
        /// We can't risk the segment values being visible as null to any thread, because
        /// FixedLengthElementArray uses Unsafe to access these values, which would cause the
        /// JVM to crash with a segmentation fault.
        this.segments = segments;
    }

    /**
     * Set the long at the given index to the specified value.
     * Uses a plain (non-release) Unsafe store — this class exists to benchmark plain
     * stores against the release stores used by the production copy of this class.
     *
     * @param index specified index
     * @param value to be inserted at specified index
     */
    public void set(long index, long value) {
        int segmentIndex = (int) (index >> log2OfSegmentSize);
        int longInSegment = (int) (index & bitmask);
        unsafe.putLong(segments[segmentIndex], (long) Unsafe.ARRAY_LONG_BASE_OFFSET + (8 * longInSegment), value);
        /// duplicate the longs here so that we can read faster.
        // The first long of each segment is mirrored into the trailing slot of the
        // previous segment, so multi-long reads never have to straddle two segments.
        if (longInSegment == 0 && segmentIndex != 0) {
            unsafe.putLong(segments[segmentIndex - 1],
                    (long) Unsafe.ARRAY_LONG_BASE_OFFSET + (8 * (1 << log2OfSegmentSize)), value);
        }
    }

    /**
     * Get the value of the long at the specified index.
     *
     * @param index specified index
     * @return the long value at the specified index
     */
    public long get(long index) {
        int segmentIndex = (int) (index >>> log2OfSegmentSize);
        return segments[segmentIndex][(int) (index & bitmask)];
    }

    /** Sets every allocated slot (including the mirror slots) to the given value. */
    public void fill(long value) {
        for (int i = 0; i < segments.length; i++) {
            long offset = Unsafe.ARRAY_LONG_BASE_OFFSET;
            for (int j = 0; j < segments[i].length; j++) {
                unsafe.putLong(segments[i], offset, value);
                offset += 8;
            }
        }
    }

    /**
     * Serializes the first {@code numLongs} values: a VarInt count followed by each
     * long in index order.
     */
    public void writeTo(DataOutputStream dos, long numLongs) throws IOException {
        VarInt.writeVLong(dos, numLongs);
        for (long i = 0; i < numLongs; i++) {
            dos.writeLong(get(i));
        }
    }

    /** Returns all segment arrays to the recycler; the instance must not be used afterwards. */
    public void destroy(ArraySegmentRecycler memoryRecycler) {
        for (int i = 0; i < segments.length; i++) {
            if (segments[i] != null) {
                memoryRecycler.recycleLongArray(segments[i]);
            }
        }
    }
}
| 9,410 |
0 |
Create_ds/hollow/hollow-perf/src/main/java
|
Create_ds/hollow/hollow-perf/src/main/java/hollow/FixedLengthElementArrayPlainPut.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hollow;
import com.netflix.hollow.core.memory.HollowUnsafeHandle;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import sun.misc.Unsafe;
// This is a copy of the class in hollow that replaces release stores with plain stores.
/**
* Each record in Hollow begins with a fixed-length number of bits. At the lowest level, these bits
* are held in long arrays using the class FixedLengthElementArray. This class allows for storage
* and retrieval of fixed-length data in a range of bits. For example, if a FixedLengthElementArray
* was queried for the 6-bit value starting at bit 7 in the following example range of bits:
* <p>
* <pre>
* 0001000100100001101000010100101001111010101010010010101
* </pre>
* <p>
* The value 100100 in binary, or 36 in base 10, would be returned.
* <p>
* Note that for performance reasons, this class makes use of sun.misc.Unsafe to perform unaligned
* memory reads. This is designed exclusively for little-endian architectures, and has only been
* fully battle-tested on x86-64.
*/
@SuppressWarnings("restriction")
public class FixedLengthElementArrayPlainPut extends SegmentedLongArrayPlainPut {
    private static final Unsafe unsafe = HollowUnsafeHandle.getUnsafe();
    // log2 of the segment size expressed in bytes (a long is 8 bytes, hence +3).
    private final int log2OfSegmentSizeInBytes;
    // Mask extracting the within-segment byte offset from a global byte index.
    private final int byteBitmask;
    public FixedLengthElementArrayPlainPut(ArraySegmentRecycler memoryRecycler, long numBits) {
        // round the requested bit count up to whole longs
        super(memoryRecycler, ((numBits - 1) >>> 6) + 1);
        this.log2OfSegmentSizeInBytes = log2OfSegmentSize + 3;
        this.byteBitmask = (1 << log2OfSegmentSizeInBytes) - 1;
    }
    /**
     * Zero out {@code bitsPerElement} bits starting at bit {@code index}.
     * Handles elements that straddle a 64-bit word boundary.
     */
    public void clearElementValue(long index, int bitsPerElement) {
        long whichLong = index >>> 6;
        int whichBit = (int) (index & 0x3F);
        long mask = ((1L << bitsPerElement) - 1);
        set(whichLong, get(whichLong) & ~(mask << whichBit));
        int bitsRemaining = 64 - whichBit;
        // element spills into the next long: clear the spill-over bits too
        if (bitsRemaining < bitsPerElement) {
            set(whichLong + 1, get(whichLong + 1) & ~(mask >>> bitsRemaining));
        }
    }
    /**
     * OR {@code value} into the {@code bitsPerElement} bits starting at bit {@code index}.
     * The destination bits must already be clear (see {@link #clearElementValue});
     * this method does not mask out existing bits.
     */
    public void setElementValue(long index, int bitsPerElement, long value) {
        long whichLong = index >>> 6;
        int whichBit = (int) (index & 0x3F);
        set(whichLong, get(whichLong) | (value << whichBit));
        int bitsRemaining = 64 - whichBit;
        // element spills into the next long: write the spill-over bits too
        if (bitsRemaining < bitsPerElement) {
            set(whichLong + 1, get(whichLong + 1) | (value >>> bitsRemaining));
        }
    }
    /** Read {@code bitsPerElement} bits starting at bit {@code index} via an unaligned load. */
    public long getElementValue(long index, int bitsPerElement) {
        return getElementValue(index, bitsPerElement, ((1L << bitsPerElement) - 1));
    }
    /**
     * Read {@code bitsPerElement} bits starting at bit {@code index}, masking with {@code mask}.
     * Performs a single unaligned 8-byte load at the element's byte address, so the
     * element must fit within that load window (whichBit + bitsPerElement <= 64;
     * NOTE(review): wide elements should use {@link #getLargeElementValue} -- confirm
     * the exact supported width against the production FixedLengthElementArray docs).
     * Relies on little-endian byte order.
     */
    public long getElementValue(long index, int bitsPerElement, long mask) {
        long whichByte = index >>> 3;
        int whichBit = (int) (index & 0x07);
        int whichSegment = (int) (whichByte >>> log2OfSegmentSizeInBytes);
        long[] segment = segments[whichSegment];
        long elementByteOffset = (long) Unsafe.ARRAY_LONG_BASE_OFFSET + (whichByte & byteBitmask);
        long l = unsafe.getLong(segment, elementByteOffset) >>> whichBit;
        return l & mask;
    }
    /** Read up to 64 bits starting at bit {@code index} using aligned long reads. */
    public long getLargeElementValue(long index, int bitsPerElement) {
        // (1L << 64) wraps to 1, so the 64-bit mask must be special-cased
        long mask = bitsPerElement == 64 ? -1 : ((1L << bitsPerElement) - 1);
        return getLargeElementValue(index, bitsPerElement, mask);
    }
    /**
     * Read {@code bitsPerElement} bits starting at bit {@code index}, masking with
     * {@code mask}. Uses two aligned long reads when the element straddles a word
     * boundary, so any width up to 64 bits is supported.
     */
    public long getLargeElementValue(long index, int bitsPerElement, long mask) {
        long whichLong = index >>> 6;
        int whichBit = (int) (index & 0x3F);
        long l = get(whichLong) >>> whichBit;
        int bitsRemaining = 64 - whichBit;
        if (bitsRemaining < bitsPerElement) {
            whichLong++;
            l |= get(whichLong) << bitsRemaining;
        }
        return l & mask;
    }
    /**
     * Copy {@code numBits} bits from the (ordered-put) source array starting at
     * {@code sourceStartBit} into this array starting at {@code destStartBit}.
     * Aligns the destination to a 64-bit boundary, bulk-copies whole longs, then
     * copies any trailing remainder.
     */
    public void copyBits(
            com.netflix.hollow.core.memory.encoding.FixedLengthElementArray copyFrom, long sourceStartBit,
            long destStartBit, long numBits) {
        if (numBits == 0) {
            return;
        }
        // align destStartBit up to the next 64-bit boundary
        if ((destStartBit & 63) != 0) {
            int fillBits = (int) Math.min(64 - (destStartBit & 63), numBits);
            long fillValue = copyFrom.getLargeElementValue(sourceStartBit, fillBits);
            setElementValue(destStartBit, fillBits, fillValue);
            destStartBit += fillBits;
            sourceStartBit += fillBits;
            numBits -= fillBits;
        }
        long currentWriteLong = destStartBit >>> 6;
        // copy whole 64-bit words
        while (numBits >= 64) {
            long l = copyFrom.getLargeElementValue(sourceStartBit, 64, -1);
            set(currentWriteLong, l);
            numBits -= 64;
            sourceStartBit += 64;
            currentWriteLong++;
        }
        // copy the trailing partial word, if any
        if (numBits != 0) {
            destStartBit = currentWriteLong << 6;
            long fillValue = copyFrom.getLargeElementValue(sourceStartBit, (int) numBits);
            setElementValue(destStartBit, (int) numBits, fillValue);
        }
    }
    /**
     * Apply {@link #increment} at {@code numIncrements} positions, starting at
     * {@code startBit} and stepping by {@code bitsBetweenIncrements}.
     */
    public void incrementMany(long startBit, long increment, long bitsBetweenIncrements, int numIncrements) {
        long endBit = startBit + (bitsBetweenIncrements * numIncrements);
        for (; startBit < endBit; startBit += bitsBetweenIncrements) {
            increment(startBit, increment);
        }
    }
    /**
     * Add {@code increment} to the value whose least-significant bit is at bit
     * {@code index}, via an unaligned read-modify-write. The element is assumed not
     * to overflow into bits beyond the loaded word -- NOTE(review): callers must
     * guarantee this; there is no carry into the following long.
     */
    public void increment(long index, long increment) {
        long whichByte = index >>> 3;
        int whichBit = (int) (index & 0x07);
        int whichSegment = (int) (whichByte >>> log2OfSegmentSizeInBytes);
        long[] segment = segments[whichSegment];
        long elementByteOffset = (long) Unsafe.ARRAY_LONG_BASE_OFFSET + (whichByte & byteBitmask);
        long l = unsafe.getLong(segment, elementByteOffset);
        unsafe.putLong(segment, elementByteOffset, l + (increment << whichBit));
        /// update the fencepost longs
        /// (the unaligned write above may have touched the duplicated boundary long,
        /// so re-mirror it into the neighboring segment's copy)
        if ((whichByte & byteBitmask) > bitmask * 8 && (whichSegment + 1) < segments.length) {
            unsafe.putLong(segments[whichSegment + 1], (long) Unsafe.ARRAY_LONG_BASE_OFFSET,
                    segments[whichSegment][bitmask + 1]);
        }
        if ((whichByte & byteBitmask) < 8 && whichSegment > 0) {
            unsafe.putLong(segments[whichSegment - 1], (long) Unsafe.ARRAY_LONG_BASE_OFFSET + (8 * (bitmask + 1)),
                    segments[whichSegment][0]);
        }
    }
    /**
     * Number of bits needed to represent {@code value}; by convention the value 0
     * requires 1 bit.
     */
    public static int bitsRequiredToRepresentValue(long value) {
        if (value == 0) {
            return 1;
        }
        return 64 - Long.numberOfLeadingZeros(value);
    }
}
| 9,411 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory/encoding/HashCodesBenchmark.java
|
package com.netflix.hollow.core.memory.encoding;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public class HashCodesBenchmark {
    /** Size, in bytes, of the randomly generated key material. */
    @Param( {"1", "10", "1000"})
    int length;

    private ThreadLocalRandom random = ThreadLocalRandom.current();

    byte[] charData;           // random bytes restricted to the ASCII range (< 0x80)
    byte[] multibyteCharData;  // random bytes truncated from a full char-sized range
    int intKey;
    long longKey;
    String stringKey;          // decoded from charData with the platform default charset
    String multibyteStringKey; // decoded from multibyteCharData with the platform default charset

    /** Populate the key material once per trial; keys are random per @State(Thread). */
    @Setup
    public void setup() {
        byte[] ascii = new byte[length];
        byte[] wide = new byte[length];
        int i = 0;
        while (i < length) {
            ascii[i] = (byte) random.nextInt(0x80);
            wide[i] = (byte) random.nextInt(Character.MAX_VALUE);
            i++;
        }
        charData = ascii;
        multibyteCharData = wide;
        stringKey = new String(ascii);
        multibyteStringKey = new String(wide);
    }

    @Benchmark
    public int hashInt() {
        return HashCodes.hashInt(intKey);
    }

    @Benchmark
    public int hashLong() {
        return HashCodes.hashLong(longKey);
    }

    @Benchmark
    public int hashString() {
        return HashCodes.hashCode(stringKey);
    }

    @Benchmark
    public int hashStringMultibyte() {
        return HashCodes.hashCode(multibyteStringKey);
    }

    @Benchmark
    public int hashBytes() {
        return HashCodes.hashCode(charData);
    }

    /** Standalone entry point with a shorter measurement schedule than the annotations. */
    public static void main(String[] args) throws RunnerException {
        OptionsBuilder builder = new OptionsBuilder();
        builder.include(HashCodesBenchmark.class.getSimpleName())
                .warmupIterations(5)
                .warmupTime(TimeValue.seconds(1))
                .measurementIterations(1)
                .measurementTime(TimeValue.seconds(3))
                .forks(1);
        Options opt = builder.build();
        new Runner(opt).run();
    }
}
| 9,412 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory/encoding/CheckSumCollections.java
|
package com.netflix.hollow.core.memory.encoding;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.util.StateEngineRoundTripper;
import com.netflix.hollow.core.write.HollowListTypeWriteState;
import com.netflix.hollow.core.write.HollowMapTypeWriteState;
import com.netflix.hollow.core.write.HollowSetTypeWriteState;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.IntFunction;
import java.util.stream.IntStream;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public class CheckSumCollections {
    /** Record holder: exactly one field is populated, selecting the collection type. */
    static class Model {
        List<Integer> l;
        Set<Integer> s;
        Map<Integer, Integer> m;
        Model(List<Integer> l) {
            this.l = l;
        }
        Model(Set<Integer> s) {
            this.s = s;
        }
        Model(Map<Integer, Integer> m) {
            this.m = m;
        }
    }

    /** Which Hollow collection type state is benchmarked. */
    public enum Type {
        List, Set, Map
    }

    HollowReadStateEngine readState;
    HollowTypeReadState typeReadState;
    HollowSchema schema;

    @Param("List")
    private Type type = Type.Map;
    @Param("100")
    private int n = 1000;
    @Param("100")
    private int size = 1000;
    @Param("8")
    private int shards = 8;
    @Param("false")
    private boolean remove = false;

    /**
     * Build a write state with n records of the selected collection type (each of
     * the given size, sharded as configured) and round-trip it into a read state.
     * When remove is set, a second round trip re-adds only every third record --
     * presumably yielding a state in which the other records were removed (verify
     * against StateEngineRoundTripper semantics).
     */
    @Setup
    public void setUp() throws IOException {
        HollowWriteStateEngine writeEngine = new HollowWriteStateEngine();
        String schemaName;
        HollowTypeWriteState writeState;
        IntFunction<Model> recordFactory;
        if (type == Type.List) {
            schemaName = "ListOfInteger";
            writeState = new HollowListTypeWriteState(
                    new HollowListSchema(schemaName, "Integer"),
                    shards);
            recordFactory = i -> new Model(IntStream.range(i, i + size).boxed().collect(toList()));
        } else if (type == Type.Set) {
            schemaName = "SetOfInteger";
            writeState = new HollowSetTypeWriteState(
                    new HollowSetSchema(schemaName, "Integer"),
                    shards);
            recordFactory = i -> new Model(IntStream.range(i, i + size).boxed().collect(toSet()));
        } else if (type == Type.Map) {
            schemaName = "MapOfIntegerToInteger";
            writeState = new HollowMapTypeWriteState(
                    new HollowMapSchema(schemaName, "Integer", "Integer"),
                    shards);
            recordFactory = i -> new Model(IntStream.range(i, i + size).boxed().collect(toMap(e -> e, e -> e)));
        } else {
            throw new Error();
        }
        writeEngine.addTypeState(writeState);
        HollowObjectMapper mapper = new HollowObjectMapper(writeEngine);
        for (int i = 0; i < n; i++) {
            mapper.add(recordFactory.apply(i));
        }
        readState = new HollowReadStateEngine();
        StateEngineRoundTripper.roundTripSnapshot(writeEngine, readState);
        if (remove) {
            for (int i = 0; i < n; i++) {
                if (i % 3 == 0) {
                    mapper.add(recordFactory.apply(i));
                }
            }
            readState = new HollowReadStateEngine();
            StateEngineRoundTripper.roundTripSnapshot(writeEngine, readState);
        }
        typeReadState = readState.getTypeState(schemaName);
        schema = typeReadState.getSchema();
    }

    // Reads
    @Benchmark
    public int checkSum() {
        return typeReadState.getChecksum(schema).intValue();
    }

    /** Standalone smoke test: build the state once and compute a checksum. */
    public static void main(String[] args) throws Exception {
        System.setProperty("CHECK", "true");
        CheckSumCollections benchmark = new CheckSumCollections();
        benchmark.setUp();
        benchmark.checkSum();
    }
}
/*
Small number of small collections and no shards.
----
Old checksum loop
--
Benchmark (n) (remove) (shards) (size) (type) Mode Cnt Score Error Units
CheckSumCollections.checkSum 100 false 1 5 List avgt 5 8620.064 ± 76.049 ns/op
CheckSumCollections.checkSum 100 false 1 5 Set avgt 5 8892.214 ± 560.921 ns/op
CheckSumCollections.checkSum 100 false 1 5 Map avgt 5 11697.191 ± 973.119 ns/op
CheckSumCollections.checkSum 100 true 1 5 List avgt 5 2989.913 ± 191.445 ns/op
CheckSumCollections.checkSum 100 true 1 5 Set avgt 5 2997.619 ± 221.645 ns/op
CheckSumCollections.checkSum 100 true 1 5 Map avgt 5 4053.238 ± 460.839 ns/op
New checksum loop
--
Benchmark (n) (remove) (shards) (size) (type) Mode Cnt Score Error Units
CheckSumCollections.checkSum 100 false 1 5 List avgt 5 8629.582 ± 870.650 ns/op
CheckSumCollections.checkSum 100 false 1 5 Set avgt 5 8775.555 ± 407.123 ns/op
CheckSumCollections.checkSum 100 false 1 5 Map avgt 5 11713.213 ± 1160.021 ns/op
CheckSumCollections.checkSum 100 true 1 5 List avgt 5 2950.090 ± 94.284 ns/op
CheckSumCollections.checkSum 100 true 1 5 Set avgt 5 2949.931 ± 28.125 ns/op
CheckSumCollections.checkSum 100 true 1 5 Map avgt 5 4024.719 ± 64.082 ns/op
Results show no regressions with new checksum loop.
Large number of small collections with increasing shards
----
Old checksum loop
--
Benchmark (n) (remove) (shards) (size) (type) Mode Cnt Score Error Units
CheckSumCollections.checkSum 100000 false 1 5 List avgt 5 8596610.673 ± 690674.898 ns/op
CheckSumCollections.checkSum 100000 false 2 5 List avgt 5 8921474.548 ± 617572.581 ns/op
CheckSumCollections.checkSum 100000 false 4 5 List avgt 5 9777300.517 ± 824076.745 ns/op
CheckSumCollections.checkSum 100000 false 8 5 List avgt 5 11111542.572 ± 1845196.989 ns/op
CheckSumCollections.checkSum 100000 false 16 5 List avgt 5 14216973.678 ± 930008.717 ns/op
CheckSumCollections.checkSum 100000 false 32 5 List avgt 5 20295407.292 ± 143616.170 ns/op
CheckSumCollections.checkSum 100000 false 64 5 List avgt 5 33702448.882 ± 2725699.235 ns/op
CheckSumCollections.checkSum 100000 false 1 5 Set avgt 5 9814348.577 ± 787704.885 ns/op
CheckSumCollections.checkSum 100000 false 2 5 Set avgt 5 11170034.021 ± 675925.787 ns/op
CheckSumCollections.checkSum 100000 false 4 5 Set avgt 5 12178179.723 ± 160719.626 ns/op
CheckSumCollections.checkSum 100000 false 8 5 Set avgt 5 14104921.070 ± 1424761.987 ns/op
CheckSumCollections.checkSum 100000 false 16 5 Set avgt 5 17494716.566 ± 214137.176 ns/op
CheckSumCollections.checkSum 100000 false 32 5 Set avgt 5 25036660.934 ± 1996250.093 ns/op
CheckSumCollections.checkSum 100000 false 64 5 Set avgt 5 37965918.676 ± 2671357.342 ns/op
CheckSumCollections.checkSum 100000 false 1 5 Map avgt 5 12865598.249 ± 937229.106 ns/op
CheckSumCollections.checkSum 100000 false 2 5 Map avgt 5 14121394.461 ± 1054106.360 ns/op
CheckSumCollections.checkSum 100000 false 4 5 Map avgt 5 15375576.497 ± 1323319.531 ns/op
CheckSumCollections.checkSum 100000 false 8 5 Map avgt 5 17339022.521 ± 1372546.941 ns/op
CheckSumCollections.checkSum 100000 false 16 5 Map avgt 5 20806426.523 ± 1809170.036 ns/op
CheckSumCollections.checkSum 100000 false 32 5 Map avgt 5 28236386.475 ± 1311581.690 ns/op
CheckSumCollections.checkSum 100000 false 64 5 Map avgt 5 40241047.257 ± 2318284.057 ns/op
CheckSumCollections.checkSum 100000 true 1 5 List avgt 5 2839641.637 ± 12376.643 ns/op
CheckSumCollections.checkSum 100000 true 2 5 List avgt 5 2990283.962 ± 162829.807 ns/op
CheckSumCollections.checkSum 100000 true 4 5 List avgt 5 3284778.179 ± 217469.913 ns/op
CheckSumCollections.checkSum 100000 true 8 5 List avgt 5 3740442.175 ± 218747.530 ns/op
CheckSumCollections.checkSum 100000 true 16 5 List avgt 5 4724311.888 ± 317028.345 ns/op
CheckSumCollections.checkSum 100000 true 32 5 List avgt 5 6812155.755 ± 56129.624 ns/op
CheckSumCollections.checkSum 100000 true 64 5 List avgt 5 11197537.392 ± 726553.201 ns/op
CheckSumCollections.checkSum 100000 true 1 5 Set avgt 5 3685641.952 ± 255347.469 ns/op
CheckSumCollections.checkSum 100000 true 2 5 Set avgt 5 3930741.424 ± 407687.414 ns/op
CheckSumCollections.checkSum 100000 true 4 5 Set avgt 5 4186047.862 ± 442308.278 ns/op
CheckSumCollections.checkSum 100000 true 8 5 Set avgt 5 4833507.041 ± 441235.419 ns/op
CheckSumCollections.checkSum 100000 true 16 5 Set avgt 5 6054760.912 ± 501335.774 ns/op
CheckSumCollections.checkSum 100000 true 32 5 Set avgt 5 8316517.003 ± 1078055.050 ns/op
CheckSumCollections.checkSum 100000 true 64 5 Set avgt 5 12462405.661 ± 781315.293 ns/op
CheckSumCollections.checkSum 100000 true 1 5 Map avgt 5 4765964.359 ± 400387.223 ns/op
CheckSumCollections.checkSum 100000 true 2 5 Map avgt 5 4908237.594 ± 444764.593 ns/op
CheckSumCollections.checkSum 100000 true 4 5 Map avgt 5 5316602.781 ± 660639.621 ns/op
CheckSumCollections.checkSum 100000 true 8 5 Map avgt 5 5798124.648 ± 492634.889 ns/op
CheckSumCollections.checkSum 100000 true 16 5 Map avgt 5 7135686.359 ± 562515.151 ns/op
CheckSumCollections.checkSum 100000 true 32 5 Map avgt 5 9403437.346 ± 640716.225 ns/op
CheckSumCollections.checkSum 100000 true 64 5 Map avgt 5 13511094.454 ± 1048413.356 ns/op
New checksum loop
--
Benchmark (n) (remove) (shards) (size) (type) Mode Cnt Score Error Units
CheckSumCollections.checkSum 100000 false 1 5 List avgt 5 8620584.454 ± 702278.822 ns/op
CheckSumCollections.checkSum 100000 false 2 5 List avgt 5 8575850.611 ± 555832.071 ns/op
CheckSumCollections.checkSum 100000 false 4 5 List avgt 5 8584383.175 ± 562913.671 ns/op
CheckSumCollections.checkSum 100000 false 8 5 List avgt 5 8532158.012 ± 207276.206 ns/op
CheckSumCollections.checkSum 100000 false 16 5 List avgt 5 8447779.118 ± 499044.657 ns/op
CheckSumCollections.checkSum 100000 false 32 5 List avgt 5 8451732.010 ± 644022.611 ns/op
CheckSumCollections.checkSum 100000 false 64 5 List avgt 5 8499921.507 ± 753095.911 ns/op
CheckSumCollections.checkSum 100000 false 1 5 Set avgt 5 9903639.521 ± 495051.194 ns/op
CheckSumCollections.checkSum 100000 false 2 5 Set avgt 5 10588014.844 ± 770598.836 ns/op
CheckSumCollections.checkSum 100000 false 4 5 Set avgt 5 11390983.126 ± 913094.206 ns/op
CheckSumCollections.checkSum 100000 false 8 5 Set avgt 5 11607058.027 ± 870592.896 ns/op
CheckSumCollections.checkSum 100000 false 16 5 Set avgt 5 11250836.388 ± 660928.743 ns/op
CheckSumCollections.checkSum 100000 false 32 5 Set avgt 5 11338874.644 ± 635317.786 ns/op
CheckSumCollections.checkSum 100000 false 64 5 Set avgt 5 11139918.800 ± 888915.970 ns/op
CheckSumCollections.checkSum 100000 false 1 5 Map avgt 5 12734036.092 ± 838503.473 ns/op
CheckSumCollections.checkSum 100000 false 2 5 Map avgt 5 13674956.077 ± 1149760.294 ns/op
CheckSumCollections.checkSum 100000 false 4 5 Map avgt 5 14134753.803 ± 1068061.447 ns/op
CheckSumCollections.checkSum 100000 false 8 5 Map avgt 5 14276311.270 ± 885502.514 ns/op
CheckSumCollections.checkSum 100000 false 16 5 Map avgt 5 14245044.137 ± 918599.933 ns/op
CheckSumCollections.checkSum 100000 false 32 5 Map avgt 5 14248491.342 ± 281088.157 ns/op
CheckSumCollections.checkSum 100000 false 64 5 Map avgt 5 14416292.644 ± 740792.676 ns/op
CheckSumCollections.checkSum 100000 true 1 5 List avgt 5 2897102.147 ± 413392.257 ns/op
CheckSumCollections.checkSum 100000 true 2 5 List avgt 5 3026996.759 ± 304404.892 ns/op
CheckSumCollections.checkSum 100000 true 4 5 List avgt 5 3150420.221 ± 161872.430 ns/op
CheckSumCollections.checkSum 100000 true 8 5 List avgt 5 3046548.631 ± 22866.490 ns/op
CheckSumCollections.checkSum 100000 true 16 5 List avgt 5 3091243.402 ± 183414.075 ns/op
CheckSumCollections.checkSum 100000 true 32 5 List avgt 5 3174963.994 ± 158946.528 ns/op
CheckSumCollections.checkSum 100000 true 64 5 List avgt 5 3218827.226 ± 396990.207 ns/op
CheckSumCollections.checkSum 100000 true 1 5 Set avgt 5 3663434.272 ± 210262.598 ns/op
CheckSumCollections.checkSum 100000 true 2 5 Set avgt 5 3899553.638 ± 111356.848 ns/op
CheckSumCollections.checkSum 100000 true 4 5 Set avgt 5 4169134.313 ± 411031.394 ns/op
CheckSumCollections.checkSum 100000 true 8 5 Set avgt 5 4035417.228 ± 31272.379 ns/op
CheckSumCollections.checkSum 100000 true 16 5 Set avgt 5 4081185.262 ± 358245.310 ns/op
CheckSumCollections.checkSum 100000 true 32 5 Set avgt 5 4005187.952 ± 262924.102 ns/op
CheckSumCollections.checkSum 100000 true 64 5 Set avgt 5 4135250.793 ± 302641.176 ns/op
CheckSumCollections.checkSum 100000 true 1 5 Map avgt 5 4752774.882 ± 303454.606 ns/op
CheckSumCollections.checkSum 100000 true 2 5 Map avgt 5 5098274.846 ± 453128.125 ns/op
CheckSumCollections.checkSum 100000 true 4 5 Map avgt 5 5218318.468 ± 483634.597 ns/op
CheckSumCollections.checkSum 100000 true 8 5 Map avgt 5 5206996.794 ± 464277.681 ns/op
CheckSumCollections.checkSum 100000 true 16 5 Map avgt 5 5098079.831 ± 372875.532 ns/op
CheckSumCollections.checkSum 100000 true 32 5 Map avgt 5 5553091.761 ± 1088287.273 ns/op
CheckSumCollections.checkSum 100000 true 64 5 Map avgt 5 5192518.094 ± 161140.260 ns/op
Results show that for the old checksum loop the time increases as the shards increase where as for the new
checksum loop the time is approximately constant (a slight increase with shards). For a list with a shard size
of 64 the new checksum loop is (33702448.882 / 8499921.507 = 3.97, 11197537.392 / 3218827.226 = 3.48) is 3x to 4x
faster.
*/
| 9,413 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory/encoding/ReadWriteFixedLengthElementArrayTest.java
|
package com.netflix.hollow.core.memory.encoding;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import hollow.FixedLengthElementArrayPlainPut;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public class ReadWriteFixedLengthElementArrayTest {
    /** Total number of bits in each array under test. */
    @Param( {"4096"})
    private int bitSize;
    /** Width, in bits, of each element read or written. */
    @Param( {"32"})
    private int bitsPerElement;
    /** Bit distance between successive write positions. */
    @Param( {"1"})
    private int bitsPerStep;
    FixedLengthElementArray f;           // production array (ordered/release puts)
    FixedLengthElementArrayPlainPut fm;  // benchmarking copy using plain puts
    int value;                           // random value fitting in bitsPerElement bits
    @Setup
    public void setUp() {
        f = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, bitSize);
        fm = new FixedLengthElementArrayPlainPut(WastefulRecycler.DEFAULT_INSTANCE, bitSize);
        // Fix: the original computed the bound as `1 << bitsPerElement`, which
        // overflows int for bitsPerElement >= 31 (1 << 32 == 1), so with the
        // default 32-bit param `value` was always 0. Compute the bound in long
        // arithmetic and cap at 31 bits so the result still fits in an int.
        long bound = 1L << Math.min(bitsPerElement, 31);
        value = (int) ThreadLocalRandom.current().nextLong(0, bound);
    }
    // Reads
    @Benchmark
    public int read() {
        int sum = 0;
        for (int i = 0; i < (bitSize - bitsPerElement); i++) {
            sum += f.getElementValue(i, bitsPerElement);
        }
        return sum;
    }
    @Benchmark
    public int readLarge() {
        int sum = 0;
        for (int i = 0; i < (bitSize - bitsPerElement); i++) {
            sum += f.getLargeElementValue(i, bitsPerElement);
        }
        return sum;
    }
    // Writes
    @Benchmark
    public Object writeOrdered() {
        for (int i = 0; i < (bitSize - bitsPerElement); i += bitsPerStep) {
            f.setElementValue(i, bitsPerElement, value);
        }
        return f;
    }
    @Benchmark
    public Object writePlain() {
        for (int i = 0; i < (bitSize - bitsPerElement); i += bitsPerStep) {
            fm.setElementValue(i, bitsPerElement, value);
        }
        // Fix: the original returned `f` (a copy-paste slip); return the array
        // that was actually written so JMH consumes it and the writes to `fm`
        // cannot be treated as dead code.
        return fm;
    }
}
/*
# JMH version: 1.21-SNAPSHOT
# VM version: JDK 1.8.0_181, VM 25.181-b13
# VM invoker: /Library/Java/JavaVirtualMachines/jdk1.8.0_181.jdk/Contents/Home/jre/bin/java
# VM options: -XX:-TieredCompilation
...
Benchmark (bitSize) (bitsPerElement) (bitsPerStep) Mode Cnt Score Error Units
ReadWriteFixedLengthElementArrayTest.read 4096 1 1 avgt 5 6868.944 ± 133.584 ns/op
ReadWriteFixedLengthElementArrayTest.read 4096 8 1 avgt 5 6885.112 ± 63.520 ns/op
ReadWriteFixedLengthElementArrayTest.read 4096 32 1 avgt 5 6840.659 ± 82.540 ns/op
ReadWriteFixedLengthElementArrayTest.read 4096 60 1 avgt 5 6771.069 ± 153.265 ns/op
ReadWriteFixedLengthElementArrayTest.readLarge 4096 1 1 avgt 5 9484.960 ± 532.618 ns/op
ReadWriteFixedLengthElementArrayTest.readLarge 4096 8 1 avgt 5 10041.474 ± 813.135 ns/op
ReadWriteFixedLengthElementArrayTest.readLarge 4096 32 1 avgt 5 12186.423 ± 149.614 ns/op
ReadWriteFixedLengthElementArrayTest.readLarge 4096 60 1 avgt 5 14307.823 ± 842.377 ns/op
Benchmark (bitSize) (bitsPerElement) (bitsPerStep) Mode Cnt Score Error Units
ReadWriteFixedLengthElementArrayTest.writeOrdered 4096 1 1 avgt 5 15967.409 ± 339.812 ns/op
ReadWriteFixedLengthElementArrayTest.writeOrdered 4096 8 1 avgt 5 18288.946 ± 724.235 ns/op
ReadWriteFixedLengthElementArrayTest.writeOrdered 4096 32 1 avgt 5 21423.327 ± 1302.434 ns/op
ReadWriteFixedLengthElementArrayTest.writeOrdered 4096 60 1 avgt 5 25419.920 ± 1808.246 ns/op
ReadWriteFixedLengthElementArrayTest.writePlain 4096 1 1 avgt 5 13209.819 ± 542.676 ns/op
ReadWriteFixedLengthElementArrayTest.writePlain 4096 8 1 avgt 5 14119.386 ± 490.491 ns/op
ReadWriteFixedLengthElementArrayTest.writePlain 4096 32 1 avgt 5 17476.441 ± 1187.400 ns/op
ReadWriteFixedLengthElementArrayTest.writePlain 4096 60 1 avgt 5 21337.294 ± 448.718 ns/op
*/
| 9,414 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/memory/encoding/OrdinalMapResize.java
|
package com.netflix.hollow.core.memory.encoding;
import com.netflix.hollow.core.memory.ByteArrayOrdinalMap;
import com.netflix.hollow.core.memory.ByteDataArray;
import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public class OrdinalMapResize {
    /** Number of entries inserted into each map. */
    @Param("256")
    int n = 256;
    /** Number of random bytes written into each entry. */
    @Param("32")
    int contentSize = 32;

    ByteDataArray[] content;

    /** Build n deterministic (seed 0) random byte payloads of contentSize bytes each. */
    @Setup
    public void setUp() {
        SplittableRandom random = new SplittableRandom(0);
        content = new ByteDataArray[n];
        for (int i = 0; i < n; i++) {
            ByteDataArray payload = new ByteDataArray();
            int remaining = contentSize;
            while (remaining > 0) {
                payload.write((byte) random.nextInt(0, 256));
                remaining--;
            }
            content[i] = payload;
        }
    }

    /** Insert all entries into a map built with the default capacity. */
    @Benchmark
    public ByteArrayOrdinalMap defaultGet() {
        ByteArrayOrdinalMap map = new ByteArrayOrdinalMap();
        for (ByteDataArray entry : content) {
            map.getOrAssignOrdinal(entry);
        }
        return map;
    }

    /** Insert all entries into a map pre-sized to 2*n slots. */
    @Benchmark
    public ByteArrayOrdinalMap sizedGet() {
        ByteArrayOrdinalMap map = new ByteArrayOrdinalMap(n << 1);
        for (ByteDataArray entry : content) {
            map.getOrAssignOrdinal(entry);
        }
        return map;
    }
}
/*
Benchmark (contentSize) (n) Mode Cnt Score Error Units
OrdinalMapResize.defaultGet 8 2048 avgt 5 524669.559 ± 94661.993 ns/op
OrdinalMapResize.defaultGet 8 8192 avgt 5 2864984.097 ± 973573.341 ns/op
OrdinalMapResize.defaultGet 8 32768 avgt 5 14644216.493 ± 461996.760 ns/op
OrdinalMapResize.defaultGet 8 131072 avgt 5 64906816.288 ± 5527376.480 ns/op
OrdinalMapResize.defaultGet 16 2048 avgt 5 579804.853 ± 35227.500 ns/op
OrdinalMapResize.defaultGet 16 8192 avgt 5 3226875.857 ± 1200816.411 ns/op
OrdinalMapResize.defaultGet 16 32768 avgt 5 16333522.740 ± 1645155.319 ns/op
OrdinalMapResize.defaultGet 16 131072 avgt 5 72300430.931 ± 8243987.662 ns/op
OrdinalMapResize.defaultGet 32 2048 avgt 5 708348.638 ± 48480.615 ns/op
OrdinalMapResize.defaultGet 32 8192 avgt 5 3845712.806 ± 659749.886 ns/op
OrdinalMapResize.defaultGet 32 32768 avgt 5 19132132.938 ± 2578367.604 ns/op
OrdinalMapResize.defaultGet 32 131072 avgt 5 81810115.046 ± 5377543.308 ns/op
OrdinalMapResize.defaultGet 64 2048 avgt 5 988812.959 ± 139916.394 ns/op
OrdinalMapResize.defaultGet 64 8192 avgt 5 5247170.001 ± 961661.753 ns/op
OrdinalMapResize.defaultGet 64 32768 avgt 5 24915997.247 ± 5133110.667 ns/op
OrdinalMapResize.defaultGet 64 131072 avgt 5 106995398.542 ± 14459051.669 ns/op
OrdinalMapResize.defaultGet 128 2048 avgt 5 1575062.220 ± 110460.192 ns/op
OrdinalMapResize.defaultGet 128 8192 avgt 5 7735092.080 ± 411007.955 ns/op
OrdinalMapResize.defaultGet 128 32768 avgt 5 35920602.082 ± 7833800.705 ns/op
OrdinalMapResize.defaultGet 128 131072 avgt 5 156354984.171 ± 13402008.695 ns/op
OrdinalMapResize.sizedGet 8 2048 avgt 5 196358.861 ± 15371.176 ns/op
OrdinalMapResize.sizedGet 8 8192 avgt 5 1250185.898 ± 337449.745 ns/op
OrdinalMapResize.sizedGet 8 32768 avgt 5 7569535.480 ± 768681.941 ns/op
OrdinalMapResize.sizedGet 8 131072 avgt 5 34166531.026 ± 2601826.357 ns/op
OrdinalMapResize.sizedGet 16 2048 avgt 5 233474.915 ± 4107.039 ns/op
OrdinalMapResize.sizedGet 16 8192 avgt 5 1618315.403 ± 503515.257 ns/op
OrdinalMapResize.sizedGet 16 32768 avgt 5 8423144.202 ± 1531274.146 ns/op
OrdinalMapResize.sizedGet 16 131072 avgt 5 37380396.196 ± 3756140.945 ns/op
OrdinalMapResize.sizedGet 32 2048 avgt 5 291975.497 ± 30660.463 ns/op
OrdinalMapResize.sizedGet 32 8192 avgt 5 1930925.813 ± 162060.695 ns/op
OrdinalMapResize.sizedGet 32 32768 avgt 5 9944128.175 ± 2468853.190 ns/op
OrdinalMapResize.sizedGet 32 131072 avgt 5 43925098.941 ± 3336023.251 ns/op
OrdinalMapResize.sizedGet 64 2048 avgt 5 423357.848 ± 60510.201 ns/op
OrdinalMapResize.sizedGet 64 8192 avgt 5 2553086.582 ± 298923.441 ns/op
OrdinalMapResize.sizedGet 64 32768 avgt 5 12388266.041 ± 4230163.664 ns/op
OrdinalMapResize.sizedGet 64 131072 avgt 5 55145939.840 ± 6016040.570 ns/op
OrdinalMapResize.sizedGet 128 2048 avgt 5 687408.861 ± 39354.473 ns/op
OrdinalMapResize.sizedGet 128 8192 avgt 5 4023545.068 ± 124931.913 ns/op
OrdinalMapResize.sizedGet 128 32768 avgt 5 17758789.247 ± 5952902.676 ns/op
OrdinalMapResize.sizedGet 128 131072 avgt 5 77575714.649 ± 10985599.044 ns/op
*/
| 9,415 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/read/ReadWriteStateEngineTest.java
|
package com.netflix.hollow.core.read;
import com.netflix.hollow.core.read.engine.HollowBlobReader;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.core.write.HollowBlobWriter;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public class ReadWriteStateEngineTest {
    //@Param( {"1000", "10000", "100000", "1000000"})
    @Param( {"1000000"})
    private int n;

    private HollowWriteStateEngine writeEngine;
    private HollowObjectTypeWriteState typeWriteState;
    private HollowObjectSchema schema;

    // Silence the blob reader's logging so it does not pollute benchmark output
    static Logger BLOB_READER_LOGGER = Logger.getLogger("com.netflix.hollow.core.read.engine.HollowBlobReader");

    /** Populates the write state with {@code n} records of a simple two-field schema. */
    @Setup
    public void setUp() {
        BLOB_READER_LOGGER.setLevel(Level.OFF);
        this.writeEngine = new HollowWriteStateEngine();
        this.schema = new HollowObjectSchema("TestObject", 2);
        schema.addField("f1", HollowObjectSchema.FieldType.INT);
        schema.addField("f2", HollowObjectSchema.FieldType.STRING);
        this.typeWriteState = new HollowObjectTypeWriteState(schema);
        writeEngine.addTypeState(typeWriteState);
        for (int i = 0; i < n; i++) {
            addRecord(1, Integer.toString(i));
        }
    }

    private void addRecord(int f1, String f2) {
        HollowObjectWriteRecord rec = new HollowObjectWriteRecord(schema);
        rec.setInt("f1", f1);
        rec.setString("f2", f2);
        writeEngine.add("TestObject", rec);
    }

    // Reads

    @Benchmark
    public HollowReadStateEngine roundtripPipe() {
        return roundTripSnapshotPipe(writeEngine);
    }

    @Benchmark
    public HollowReadStateEngine roundtripPipeBuffered() {
        return roundTripSnapshotPipeBuffered(writeEngine);
    }

    @Benchmark
    public HollowReadStateEngine roundtripMemory() throws IOException {
        return roundTripSnapshotMemory(writeEngine);
    }

    @Benchmark
    public HollowReadStateEngine roundtripFile() throws IOException {
        return roundTripSnapshotFile(writeEngine);
    }

    private static HollowReadStateEngine roundTripSnapshotPipe(HollowWriteStateEngine writeEngine) {
        return roundTripSnapshotPipe(writeEngine, false);
    }

    private static HollowReadStateEngine roundTripSnapshotPipeBuffered(HollowWriteStateEngine writeEngine) {
        return roundTripSnapshotPipe(writeEngine, true);
    }

    /**
     * Round-trips a snapshot through a pipe: the snapshot is written on a background
     * thread while the calling thread reads it, avoiding temporary files and large
     * in-memory buffers.
     *
     * @param buffered if true, wrap the write side in a {@link BufferedOutputStream} and
     *                 the read side in a {@link BufferedInputStream} to reduce the number
     *                 of (comparatively expensive) calls on the piped streams
     */
    private static HollowReadStateEngine roundTripSnapshotPipe(HollowWriteStateEngine writeEngine, boolean buffered) {
        HollowBlobWriter writer = new HollowBlobWriter(writeEngine);
        HollowReadStateEngine readEngine = new HollowReadStateEngine();
        HollowBlobReader reader = new HollowBlobReader(readEngine);
        // @@@ for small states it's more efficient to sequentially write to
        // and read from a byte array but it is tricky to estimate the size
        SimultaneousExecutor executor = new SimultaneousExecutor(1, ReadWriteStateEngineTest.class, "test");
        Exception pipeException = null;
        // Ensure read-side is closed after completion of read
        try (PipedInputStream in = new PipedInputStream(1 << 15)) {
            PipedOutputStream out = new PipedOutputStream(in);
            executor.execute(() -> {
                // Ensure write-side is closed after completion of write
                try (Closeable ac = out) {
                    if (buffered) {
                        BufferedOutputStream bufferedOut = new BufferedOutputStream(out);
                        writer.writeSnapshot(bufferedOut);
                        // Flush explicitly: closing only the underlying pipe would drop
                        // any bytes still sitting in the buffer
                        bufferedOut.flush();
                    } else {
                        writer.writeSnapshot(out);
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
            reader.readSnapshot(HollowBlobInput.serial(buffered ? new BufferedInputStream(in) : in));
        } catch (Exception e) {
            pipeException = e;
        }
        // Ensure no underlying writer exception is lost due to broken pipe
        try {
            executor.awaitSuccessfulCompletion();
        } catch (InterruptedException | ExecutionException e) {
            if (pipeException == null) {
                throw new RuntimeException(e);
            }
            pipeException.addSuppressed(e);
        }
        if (pipeException != null) {
            throw new RuntimeException(pipeException);
        }
        return readEngine;
    }

    /** Round-trips a snapshot through an in-memory byte array. */
    public static HollowReadStateEngine roundTripSnapshotMemory(HollowWriteStateEngine writeEngine) throws IOException {
        HollowBlobWriter writer = new HollowBlobWriter(writeEngine);
        HollowReadStateEngine readEngine = new HollowReadStateEngine();
        HollowBlobReader reader = new HollowBlobReader(readEngine);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writer.writeSnapshot(baos);
        reader.readSnapshot(HollowBlobInput.serial(baos.toByteArray()));
        return readEngine;
    }

    /** Round-trips a snapshot through a temporary file, which is deleted afterwards. */
    public static HollowReadStateEngine roundTripSnapshotFile(HollowWriteStateEngine writeEngine) throws IOException {
        HollowBlobWriter writer = new HollowBlobWriter(writeEngine);
        HollowReadStateEngine readEngine = new HollowReadStateEngine();
        HollowBlobReader reader = new HollowBlobReader(readEngine);
        File f = File.createTempFile("snapshot", null);
        try {
            // try-with-resources closes (and thereby flushes) the buffered stream
            try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(f))) {
                writer.writeSnapshot(out);
            }
            try (HollowBlobInput in = HollowBlobInput.serial(new BufferedInputStream(new FileInputStream(f)))) {
                reader.readSnapshot(in);
            }
        } finally {
            // Previously the temp file was leaked on every invocation; remove it
            f.delete();
        }
        return readEngine;
    }
}
/*
Benchmark (n) Mode Cnt Score Error Units
ReadWriteStateEngineTest.roundtripFile 1000 avgt 5 0.543 ± 0.200 ms/op
ReadWriteStateEngineTest.roundtripFile 10000 avgt 5 1.838 ± 0.307 ms/op
ReadWriteStateEngineTest.roundtripFile 100000 avgt 5 13.867 ± 0.589 ms/op
ReadWriteStateEngineTest.roundtripFile 1000000 avgt 5 148.361 ± 12.882 ms/op
ReadWriteStateEngineTest.roundtripMemory 1000 avgt 5 0.165 ± 0.009 ms/op
ReadWriteStateEngineTest.roundtripMemory 10000 avgt 5 1.170 ± 0.168 ms/op
ReadWriteStateEngineTest.roundtripMemory 100000 avgt 5 11.469 ± 0.442 ms/op
ReadWriteStateEngineTest.roundtripMemory 1000000 avgt 5 126.531 ± 23.077 ms/op
ReadWriteStateEngineTest.roundtripPipe 1000 avgt 5 0.299 ± 0.016 ms/op
ReadWriteStateEngineTest.roundtripPipe 10000 avgt 5 1.709 ± 0.196 ms/op
ReadWriteStateEngineTest.roundtripPipe 100000 avgt 5 15.217 ± 0.255 ms/op
ReadWriteStateEngineTest.roundtripPipe 1000000 avgt 5 188.362 ± 17.491 ms/op
ReadWriteStateEngineTest.roundtripPipeBuffered 1000 avgt 5 0.257 ± 0.034 ms/op
ReadWriteStateEngineTest.roundtripPipeBuffered 10000 avgt 5 1.364 ± 0.188 ms/op
ReadWriteStateEngineTest.roundtripPipeBuffered 100000 avgt 5 11.595 ± 0.365 ms/op
ReadWriteStateEngineTest.roundtripPipeBuffered 1000000 avgt 5 122.227 ± 14.853 ms/op
# Large snapshot, about 100MB in size
ReadWriteStateEngineTest.roundtripFile 10000000 avgt 5 1418.340 ± 244.367 ms/op
ReadWriteStateEngineTest.roundtripMemory 10000000 avgt 5 1298.582 ± 149.978 ms/op
ReadWriteStateEngineTest.roundtripPipe 10000000 avgt 5 1748.863 ± 128.384 ms/op
ReadWriteStateEngineTest.roundtripPipeBuffered 10000000 avgt 5 1255.351 ± 123.275 ms/op
# Using buffering with the pipes will result in less calls to the piped streams
# Here is a stack sampling profile (using -p stack) for roundtripPipe
20.7% 51.0% java.lang.Thread.isAlive
9.7% 24.0% com.netflix.hollow.core.memory.SegmentedByteArray.copy
6.4% 15.9% com.netflix.hollow.core.write.HollowObjectTypeWriteState.addRecord
2.1% 5.3% com.netflix.hollow.core.memory.ByteArrayOrdinalMap.maxOrdinal
0.6% 1.5% java.lang.Object.wait
0.4% 1.0% com.netflix.hollow.core.write.HollowObjectTypeWriteState.calculateSnapshot
0.1% 0.3% com.netflix.hollow.core.memory.pool.WastefulRecycler.getLongArray
0.1% 0.1% com.netflix.hollow.core.memory.pool.RecyclingRecycler$1.create
0.1% 0.1% com.netflix.hollow.core.memory.SegmentedByteArray.readFrom
0.1% 0.1% java.lang.Object.notifyAll
0.2% 0.6% <other>
# Here is a profile (using -p stack) for roundtripPipeBuffered
12.4% 32.8% com.netflix.hollow.core.memory.SegmentedByteArray.copy
8.6% 22.8% com.netflix.hollow.core.write.HollowObjectTypeWriteState.addRecord
6.2% 16.3% java.lang.Thread.isAlive
2.8% 7.4% com.netflix.hollow.core.memory.ByteArrayOrdinalMap.maxOrdinal
2.0% 5.4% com.netflix.hollow.core.memory.SegmentedByteArray.readFrom
1.9% 5.1% java.io.DataInputStream.readFully
1.2% 3.2% java.io.DataOutputStream.writeLong
0.9% 2.5% java.lang.Object.wait
0.7% 1.8% com.netflix.hollow.core.write.HollowObjectTypeWriteState.calculateSnapshot
0.3% 0.9% com.netflix.hollow.core.memory.SegmentedLongArray.readFrom
0.8% 2.0% <other>
# We can observe that there are less calls to Thread.isAlive when buffering is performed,
# since more reads/writes are performed in bulk resulting in calls to the piped streams.
# Thread.isAlive is a native method and is not intrinsic to HotSpot (meaning there is no special
# treatment of this native call by the runtime compiler) therefore it is expensive to call (unlike that of say
# Thread.currentThread, which is intrinsic). This likely explains the majority of the performance difference.
# Another aspect is likely contention with the reader thread waiting on the writer thread to write bytes and
# vice versa.
#
# Note: from OpenJDK 9 onwards most intrinsic methods are annotated with the @HotSpotIntrinsicCandidate annotation (renamed to @IntrinsicCandidate in JDK 16).
*/
| 9,416 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/read/engine
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeReadStateShardBenchmark.java
|
package com.netflix.hollow.core.read.engine.object;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.util.StateEngineRoundTripper;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
@State(Scope.Thread)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 10, time = 1)
@Measurement(iterations = 10, time = 1)
@Fork(1)
public class HollowObjectTypeReadStateShardBenchmark {
    HollowWriteStateEngine writeStateEngine;
    HollowReadStateEngine readStateEngine;
    HollowObjectTypeDataAccess dataAccess;
    HollowObjectMapper objectMapper;

    @Param({ "500" })
    int countStrings;

    @Param({ "100000" })
    int countStringsDb;

    // Ordinals to read back, chosen at random during setup
    ArrayList<Integer> readOrder;

    @Param({ "5", "25", "50", "150", "1000" })
    int maxStringLength;

    @Param({ "10" })
    int probabilityUnicode;

    /**
     * Builds a String-typed read state from {@code countStringsDb} random strings
     * (each containing a unicode character with roughly {@code probabilityUnicode}%
     * probability per position) and picks {@code countStrings} random ordinals to read.
     */
    @Setup
    public void setUp() throws IOException {
        writeStateEngine = new HollowWriteStateEngine();
        objectMapper = new HollowObjectMapper(writeStateEngine);
        objectMapper.initializeTypeState(String.class);

        Random rnd = new Random();
        for (int recordIdx = 0; recordIdx < countStringsDb; recordIdx++) {
            StringBuilder builder = new StringBuilder();
            builder.append("string_").append(recordIdx).append("_");
            // Pad out to a random total length, never shorter than the prefix
            int remaining = rnd.nextInt(maxStringLength) - builder.length() + 1;
            while (remaining-- > 0) {
                boolean unicode = rnd.nextInt(100) < probabilityUnicode;
                builder.append(unicode ? "\u123E" : String.valueOf((char) (rnd.nextInt(26) + 'a')));
            }
            objectMapper.add(builder.toString());
        }

        readOrder = new ArrayList<>(countStrings);
        for (int readIdx = 0; readIdx < countStrings; readIdx++) {
            readOrder.add(rnd.nextInt(countStringsDb));
        }

        readStateEngine = new HollowReadStateEngine();
        StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine, null);
        dataAccess = (HollowObjectTypeDataAccess) readStateEngine.getTypeDataAccess("String", 0);
    }

    /** Reads the pre-selected ordinals back as strings. */
    @Benchmark
    public void testReadString(Blackhole bh) {
        for (int ordinal : readOrder) {
            bh.consume(dataAccess.readString(ordinal, 0));
        }
    }
}
| 9,417 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/read/engine
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeReadStateDeltaTransitionBenchmark.java
|
package com.netflix.hollow.core.read.engine.object;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.util.StateEngineRoundTripper;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
@State(Scope.Thread)
@BenchmarkMode({Mode.All})
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Warmup(iterations = 1, time = 1)
@Measurement(iterations = 15, time = 1)
@Fork(1)
/**
* Runs delta transitions in the background while benchmarking reads. Re-sharding in delta transitions can be toggled with a param.
*/
public class HollowObjectTypeReadStateDeltaTransitionBenchmark {
    HollowWriteStateEngine writeStateEngine;
    HollowReadStateEngine readStateEngine;
    HollowObjectTypeDataAccess dataAccess;
    HollowObjectMapper objectMapper;

    // Number of distinct records sampled by the read benchmark
    int countStringsToRead = 500;

    @Param({ "true" })
    boolean isReshardingEnabled;

    @Param({ "500", "1000" })
    int shardSizeKBs;

    @Param({ "5", "100" })
    int maxStringLength;

    int countStringsDb = 100000;
    int deltaChanges = 2000;

    // Ordinals whose records are re-added on every background delta cycle, so they
    // remain populated (and safe to read) while transitions are in flight
    ArrayList<Integer> readOrder;

    ExecutorService refreshExecutor;
    Future<?> reshardingFuture;
    CountDownLatch doneBenchmark;
    final Random r = new Random();

    /**
     * Builds the initial state, then starts a background task that continuously applies
     * delta transitions (optionally flip-flopping the target shard size to force
     * re-sharding) until the iteration tears down.
     */
    @Setup(Level.Iteration)
    public void setUp() throws ExecutionException, InterruptedException {
        final List<String> readStrings = new ArrayList<>();
        final Set<Integer> readKeys = new HashSet<>();
        refreshExecutor = Executors.newSingleThreadExecutor();

        // Build the initial snapshot on the refresh thread and wait for it to finish
        refreshExecutor.submit(() -> {
            writeStateEngine = new HollowWriteStateEngine();
            writeStateEngine.setTargetMaxTypeShardSize((long) shardSizeKBs * 1000l);
            objectMapper = new HollowObjectMapper(writeStateEngine);
            objectMapper.initializeTypeState(String.class);

            readOrder = new ArrayList<>(countStringsToRead);
            for (int i = 0; i < countStringsToRead; i++) {
                readOrder.add(r.nextInt(countStringsDb));
            }
            readKeys.addAll(readOrder);

            for (int i = 0; i < countStringsDb; i++) {
                StringBuilder sb = new StringBuilder();
                sb.append("string_");
                sb.append(i);
                sb.append("_");
                int thisStringLength = r.nextInt(maxStringLength) - sb.length() + 1;
                for (int j = 0; j < thisStringLength; j++) {
                    sb.append((char) (r.nextInt(26) + 'a'));
                }
                String s = sb.toString();
                objectMapper.add(s);
                // Remember the exact strings behind the sampled keys so the delta
                // cycle can keep them alive
                if (readKeys.contains(i)) {
                    readStrings.add(s);
                }
            }

            readStateEngine = new HollowReadStateEngine();
            try {
                StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine, null);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            dataAccess = (HollowObjectTypeDataAccess) readStateEngine.getTypeDataAccess("String", 0);
        }).get();

        doneBenchmark = new CountDownLatch(1);
        reshardingFuture = refreshExecutor.submit(() -> {
            Random r = new Random();
            long origShardSize = shardSizeKBs * 1000l;
            long newShardSize = origShardSize;
            do {
                // Re-add the sampled strings first so their records survive the delta
                for (int i = 0; i < readStrings.size(); i++) {
                    objectMapper.add(readStrings.get(i));
                }
                // Churn a batch of non-sampled records to produce a non-trivial delta
                for (int i = 0; i < deltaChanges; i++) {
                    int changeKey = r.nextInt(countStringsDb);
                    if (readKeys.contains(changeKey)) {
                        continue;
                    }
                    StringBuilder sb = new StringBuilder();
                    sb.append("string_");
                    sb.append(changeKey);
                    sb.append("_");
                    int thisStringLength = r.nextInt(maxStringLength) - sb.length() + 1;
                    for (int j = 0; j < thisStringLength; j++) {
                        sb.append((char) (r.nextInt(26) + 'a'));
                    }
                    objectMapper.add(sb.toString());
                }
                try {
                    if (isReshardingEnabled) {
                        // Alternate between the original and 1/10th shard size to force
                        // re-sharding on every other transition
                        if (newShardSize == origShardSize) {
                            newShardSize = origShardSize / 10;
                        } else {
                            newShardSize = origShardSize;
                        }
                        writeStateEngine.setTargetMaxTypeShardSize(newShardSize);
                    }
                    StateEngineRoundTripper.roundTripDelta(writeStateEngine, readStateEngine);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            } while (doneBenchmark.getCount() > 0);
        });
    }

    /** Stops the background delta loop and shuts the refresh executor down. */
    @TearDown(Level.Iteration)
    public void tearDown() {
        doneBenchmark.countDown();
        reshardingFuture.cancel(true);
        refreshExecutor.shutdown();
        try {
            if (!refreshExecutor.awaitTermination(1, TimeUnit.SECONDS)) {
                refreshExecutor.shutdownNow();
            }
        } catch (InterruptedException e) {
            refreshExecutor.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    @Benchmark
    public void testReadString(Blackhole bh) {
        // Read one of the preserved ordinals. Previously the random index j was passed
        // straight to readString(), reading arbitrary ordinals 0..499 that the
        // background delta cycle does NOT keep alive; readOrder.get(j) yields an
        // ordinal whose record is re-added every cycle and is therefore valid.
        // (Records were added sequentially to a fresh write state, so a record's
        // ordinal equals its insertion index.)
        int ordinal = readOrder.get(r.nextInt(readOrder.size()));
        String result = dataAccess.readString(ordinal, 0);
        bh.consume(result);
    }
}
| 9,418 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/index/HollowHashIndexBenchmark.java
|
package com.netflix.hollow.core.index;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
public class HollowHashIndexBenchmark {
    /** Measures index construction cost; the base setup does not pre-build indexes. */
    public static class BuildHollowHashIndexBenchmark extends AbstractHollowHashIndexBenchmark {
        @Override
        protected boolean shouldCreateIndexes() {
            // The benchmark method itself builds the index
            return false;
        }
        @Benchmark
        public HollowHashIndex buildIndex() {
            return createIndex();
        }
    }
    /** Measures lookup cost against pre-built indexes. */
    public static class LoadHollowHashIndexBenchmark extends AbstractHollowHashIndexBenchmark {
        // Lookup of keys present in the data set
        @Benchmark
        public HollowHashIndexResult findMatches() {
            return nextIndex().findMatches(nextKeys());
        }
        // Lookup of keys (-1) absent from the data set
        @Benchmark
        public HollowHashIndexResult findMatchesMissing() {
            return nextIndex().findMatches(missingKeys());
        }
    }
    /** Shared base: builds a HollowHashIndex over the IntType test data with a configurable key cardinality. */
    public static class AbstractHollowHashIndexBenchmark extends AbstractHollowIndexBenchmark<HollowHashIndex> {
        //@Param( {"1", "1000", "10000", "100000"})
        @Param( {"1000"})
        public int cardinality;
        @Override
        protected int cardinality() {
            return cardinality;
        }
        @Override
        public HollowHashIndex createIndex() {
            return new HollowHashIndex(readStateEngine, IntType.class.getSimpleName(), "", matchFields);
        }
    }
    /** Runs both nested benchmark classes with a short fixed JMH configuration. */
    public static void main(String[] args) throws RunnerException {
        Options opt = new OptionsBuilder()
                .include(HollowHashIndexBenchmark.class.getSimpleName())
                .warmupIterations(5)
                .warmupTime(TimeValue.seconds(1))
                .measurementIterations(1)
                .measurementTime(TimeValue.seconds(3))
                .forks(1)
                .build();
        new Runner(opt).run();
    }
}
| 9,419 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/index/AbstractHollowIndexBenchmark.java
|
package com.netflix.hollow.core.index;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.util.StateEngineRoundTripper;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.logging.LogManager;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
/**
* Abstract benchmark class for Hollow indexes. Uses integer keys to avoid mixing in the overhead of individual hash functions.
*/
@State(Scope.Benchmark)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
public abstract class AbstractHollowIndexBenchmark<T> {
    // Heap budget, in bytes, used to size the array of duplicate indexes so lookups
    // touch a realistic amount of memory. (The old name "TARGET_SIZE_MB" was
    // misleading: the value has always been a byte count.)
    private static final int TARGET_SIZE_BYTES = 30 * 1024 * 1024;
    // Rough per-entry footprint estimate used when sizing the index array
    private static final int ENTRY_OVERHEAD_BYTES = 16;

    private final HollowWriteStateEngine writeStateEngine = new HollowWriteStateEngine();
    private final HollowObjectMapper mapper = new HollowObjectMapper(writeStateEngine);
    private T[] indexes;

    protected HollowReadStateEngine readStateEngine = new HollowReadStateEngine();
    protected String[] matchFields;

    //@Param( {"1", "10", "100", "1000", "10000", "100000", "1000000"})
    @Param( {"1000"})
    public int size;

    //@Param( {"1", "2", "3", "5", "8"})
    @Param( {"1"})
    public int querySize = 1;

    //@Param( {"false", "true"})
    @Param( {"false"})
    public boolean nested = false;

    /**
     * Populates the read state with {@code size} IntType records, resolves the match
     * field names, and (unless {@link #shouldCreateIndexes()} is false) builds enough
     * duplicate indexes to fill the target heap budget.
     */
    @SuppressWarnings("unchecked")
    @Setup
    public void setup() throws IOException {
        LogManager.getLogManager().reset();
        for (int i = 0; i < size; i++) {
            // Make field values unique for a given object, otherwise index build performance worsens significantly
            int key = getKey(8 * i);
            mapper.add(new IntType(key, new NestedIntType(key)));
        }
        StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine);

        matchFields = new String[querySize];
        for (int i = 0; i < querySize; i++) {
            int fieldNum = i + 1;
            String fieldName = "field" + fieldNum;
            matchFields[i] = nested ? "nested." + fieldName : fieldName;
        }

        if (!shouldCreateIndexes()) {
            return;
        }
        // Clamp to at least one index so nextIndex() never draws from an empty array
        // when ENTRY_OVERHEAD_BYTES * size exceeds the byte budget
        int length = Math.max(1, TARGET_SIZE_BYTES / (ENTRY_OVERHEAD_BYTES * size));
        T index = createIndex();
        indexes = (T[]) Array.newInstance(index.getClass(), length);
        indexes[0] = index; // reuse the probe index instead of discarding it
        for (int i = 1; i < indexes.length; i++) {
            indexes[i] = createIndex();
        }
    }

    /** Subclasses return false when the benchmark method itself builds the index. */
    protected boolean shouldCreateIndexes() {
        return true;
    }

    /** Returns one of the duplicate indexes at random. */
    protected T nextIndex() {
        // ThreadLocalRandom.current() is called at the use site: a ThreadLocalRandom
        // instance must not be cached in a field and shared across threads
        return indexes[ThreadLocalRandom.current().nextInt(indexes.length)];
    }

    /** Returns a query key array for a randomly chosen existing record. */
    protected Object[] nextKeys() {
        int key = getKey(ThreadLocalRandom.current().nextInt(size));
        Integer[] keys = new Integer[querySize];
        for (int i = 0; i < querySize; i++) {
            keys[i] = key + i;
        }
        return keys;
    }

    /** Returns a query key array (-1s) that matches no record. */
    protected Object[] missingKeys() {
        Integer[] keys = new Integer[querySize];
        for (int i = 0; i < querySize; i++) {
            keys[i] = -1;
        }
        return keys;
    }

    protected abstract T createIndex();

    /** Number of records sharing each key value; 1 means all keys are unique. */
    protected int cardinality() {
        return 1;
    }

    /** Rounds {@code key} down to a multiple of the configured cardinality. */
    protected int getKey(int key) {
        int cardinality = cardinality();
        return key - key % cardinality;
    }

    // Test record type; fields are intentionally not read directly in this class —
    // presumably they are consumed reflectively by HollowObjectMapper (TODO confirm)
    protected static class IntType {
        private int field1;
        private int field2;
        private int field3;
        private int field4;
        private int field5;
        private int field6;
        private int field7;
        private int field8;
        private NestedIntType nested;
        public IntType(int i, NestedIntType nested) {
            this.field1 = i;
            this.field2 = i + 1;
            this.field3 = i + 2;
            this.field4 = i + 3;
            this.field5 = i + 4;
            this.field6 = i + 5;
            this.field7 = i + 6;
            this.field8 = i + 7;
            this.nested = nested;
        }
    }

    private static class NestedIntType {
        private int field1;
        private int field2;
        private int field3;
        private int field4;
        private int field5;
        private int field6;
        private int field7;
        private int field8;
        public NestedIntType(int i) {
            this.field1 = i;
            this.field2 = i + 1;
            this.field3 = i + 2;
            this.field4 = i + 3;
            this.field5 = i + 4;
            this.field6 = i + 5;
            this.field7 = i + 6;
            this.field8 = i + 7;
        }
    }
}
| 9,420 |
0 |
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/index
|
Create_ds/hollow/hollow-perf/src/jmh/java/com/netflix/hollow/core/index/key/HollowPrimaryKeyIndexBenchmark.java
|
package com.netflix.hollow.core.index.key;
import com.netflix.hollow.core.index.AbstractHollowIndexBenchmark;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
public class HollowPrimaryKeyIndexBenchmark {
    /** Measures index construction cost; the base setup does not pre-build indexes. */
    public static class BuildHollowPrimaryKeyIndexBenchmark extends AbstractHollowPrimaryKeyIndexBenchmark {
        @Override
        protected boolean shouldCreateIndexes() {
            // The benchmark method itself builds the index
            return false;
        }
        @Benchmark
        public HollowPrimaryKeyIndex buildIndex() {
            return createIndex();
        }
    }
    /** Measures lookup cost against pre-built indexes. */
    public static class LoadHollowPrimaryKeyIndexBenchmark extends AbstractHollowPrimaryKeyIndexBenchmark {
        // Lookup of keys present in the data set
        @Benchmark
        public int getMatchingOrdinal() {
            return nextIndex().getMatchingOrdinal(nextKeys());
        }
        // Lookup of keys (-1) absent from the data set
        @Benchmark
        public int getMatchingOrdinalMissing() {
            return nextIndex().getMatchingOrdinal(missingKeys());
        }
    }
    /** Shared base: builds a HollowPrimaryKeyIndex over the IntType test data. */
    public static class AbstractHollowPrimaryKeyIndexBenchmark
            extends AbstractHollowIndexBenchmark<HollowPrimaryKeyIndex> {
        @Override
        public HollowPrimaryKeyIndex createIndex() {
            return new HollowPrimaryKeyIndex(readStateEngine, IntType.class.getSimpleName(), matchFields);
        }
    }
    /** Runs both nested benchmark classes with a short fixed JMH configuration. */
    public static void main(String[] args) throws RunnerException {
        Options opt = new OptionsBuilder()
                .include(HollowPrimaryKeyIndexBenchmark.class.getSimpleName())
                .warmupIterations(5)
                .warmupTime(TimeValue.seconds(1))
                .measurementIterations(1)
                .measurementTime(TimeValue.seconds(3))
                .forks(1)
                .build();
        new Runner(opt).run();
    }
}
| 9,421 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/test/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/test/java/com/netflix/hollow/jsonadapter/chunker/JsonArrayChunkerTest.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.chunker;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import java.io.StringReader;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
public class JsonArrayChunkerTest {
    @Test
    public void test() throws Exception {
        // An array of two objects; the first contains escaped quotes and
        // unbalanced-looking braces inside string values, which the chunker must
        // treat as literal text rather than structure.
        String jsonArray = "[ { \"f1\\\"\" : \"value1\", \"f2\" : { \"f1.1\" : \"hel}}{{{{lo \\\"w{orld\\\"\" } } , { \"obj2\" : \"f2.1\" } ]";
        String expectedFirst = "{ \"f1\\\"\" : \"value1\", \"f2\" : { \"f1.1\" : \"hel}}{{{{lo \\\"w{orld\\\"\" } }";
        String expectedSecond = "{ \"obj2\" : \"f2.1\" }";

        JsonArrayChunker arrayChunker = new JsonArrayChunker(new StringReader(jsonArray), new SimultaneousExecutor(getClass(), "test"), 4);
        arrayChunker.initialize();

        String firstObject = IOUtils.toString(arrayChunker.nextChunk());
        String secondObject = IOUtils.toString(arrayChunker.nextChunk());

        Assert.assertEquals(expectedFirst, firstObject);
        Assert.assertEquals(expectedSecond, secondObject);
    }
}
| 9,422 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/AbstractHollowJsonAdaptorTask.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.jsonadapter.chunker.JsonArrayChunker;
import com.netflix.hollow.jsonadapter.field.FieldProcessor;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
public abstract class AbstractHollowJsonAdaptorTask {
    public static boolean isDebug = false;

    // Back-pressure bound: submission pauses while the executor queue exceeds this size
    protected final int maxWorkQueue = 2048;
    protected final SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "json-adaptor");
    protected final String typeName;
    protected final String actionName;
    // entity name -> (field name -> processor)
    protected final Map<String, Map<String, FieldProcessor>> fieldProcessors;

    public AbstractHollowJsonAdaptorTask(String typeName) {
        this(typeName, null);
    }

    public AbstractHollowJsonAdaptorTask(String typeName, String actionName) {
        this.typeName = typeName;
        this.actionName = actionName;
        this.fieldProcessors = new HashMap<>();
    }

    public String getTypeName() {
        return typeName;
    }

    /** Registers field processors, keyed first by entity name and then by field name. */
    public void addFieldProcessor(FieldProcessor... processors) {
        for (FieldProcessor p : processors) {
            fieldProcessors.computeIfAbsent(p.getEntityName(), k -> new HashMap<>())
                    .put(p.getFieldName(), p);
        }
    }

    /** Returns the processor registered for the given entity/field, or null if none. */
    public FieldProcessor getFieldProcessor(String entityName, String fieldName) {
        Map<String, FieldProcessor> entityFieldProcessors = fieldProcessors.get(entityName);
        return entityFieldProcessors == null ? null : entityFieldProcessors.get(fieldName);
    }

    protected void processFile(File f, int maxSample) throws Exception {
        // Decode as UTF-8 explicitly (FileReader used the platform default charset),
        // and close the reader when done (it was previously leaked)
        try (Reader r = new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8)) {
            processFile(r, maxSample);
        }
    }

    ///TODO: Many parse failures can cause out of memory errors.
    /**
     * Splits a JSON array read from {@code r} into per-object chunks and processes up to
     * {@code maxSample} of them concurrently via {@link #processRecord(JsonParser)}.
     */
    protected void processFile(Reader r, int maxSample) throws Exception {
        JsonArrayChunker chunker = new JsonArrayChunker(r, executor);
        chunker.initialize();

        int counter = 0;
        Reader jsonObj = chunker.nextChunk();
        while (jsonObj != null && counter < maxSample) {
            final Reader currentObject = jsonObj;
            executor.execute(new Runnable() {
                public void run() {
                    try {
                        JsonFactory factory = new JsonFactory();
                        JsonParser parser = factory.createParser(currentObject);
                        processRecord(parser);
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });

            // Back-pressure: don't let the work queue grow without bound
            while (executor.getQueue().size() > maxWorkQueue) {
                Thread.sleep(5);
            }
            counter++;
            jsonObj.close();
            jsonObj = chunker.nextChunk();
        }

        executor.awaitSuccessfulCompletion();
    }

    protected abstract int processRecord(JsonParser parser) throws IOException;

    /**
     * Waits for all futures and returns true if at least one completed successfully.
     * The first ExecutionException encountered is rethrown; interruption is logged and
     * the thread's interrupt status is restored.
     */
    protected boolean wait(List<Future<?>> futureList) throws Exception {
        boolean isSuccess = false;
        for (final Future<?> f : futureList) {
            try {
                f.get();
                isSuccess = true;
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                e.printStackTrace();
            } catch (final ExecutionException e) {
                e.printStackTrace();
                throw e;
            }
        }
        return isSuccess;
    }
}
| 9,423 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/HollowJsonAdapterPrimaryKeyFinder.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
* Extracts just a user-defined primary key from a json record
*/
public class HollowJsonAdapterPrimaryKeyFinder {

    // All schemas in the dataset, keyed by type name.
    private final Map<String, HollowSchema> hollowSchemas;
    // The root type whose records are scanned for key fields.
    private final String typeName;
    // Scratch array: findKey(JsonParser) fills and returns this SAME instance every call.
    private final Object[] keyElementArray;
    // Dotted field path (e.g. "id.value") -> slot index in keyElementArray.
    private final Map<String, Integer> keyFieldPathPositions;

    public HollowJsonAdapterPrimaryKeyFinder(Collection<HollowSchema> schemas, String typeName, String... keyFieldPaths) {
        this.hollowSchemas = new HashMap<String, HollowSchema>();
        this.typeName = typeName;

        for(HollowSchema schema : schemas) {
            hollowSchemas.put(schema.getName(), schema);
        }

        this.keyElementArray = new Object[keyFieldPaths.length];
        this.keyFieldPathPositions = new HashMap<String, Integer>();
        for(int i=0;i<keyFieldPaths.length;i++) {
            keyFieldPathPositions.put(keyFieldPaths[i], Integer.valueOf(i));
        }
    }

    /**
     * Parses the given json document and extracts the configured key elements.
     * Unlike {@link #findKey(JsonParser)}, this returns a defensive copy of the array.
     */
    public Object[] findKey(String json) throws IOException {
        JsonFactory factory = new JsonFactory();
        JsonParser parser = factory.createParser(new StringReader(json));
        return Arrays.copyOf(findKey(parser), keyElementArray.length);
    }

    /**
     * Scans the record on the parser and returns the shared internal scratch array
     * (NOT a copy) -- callers must consume it before the next invocation.
     * Not thread-safe: concurrent calls would overwrite each other's results.
     */
    public Object[] findKey(JsonParser parser) throws IOException {
        parser.nextToken();
        HollowObjectSchema schema = (HollowObjectSchema)hollowSchemas.get(typeName);
        addObject(parser, schema, new StringBuilder());
        return keyElementArray;
    }

    // Walks one JSON object's fields, recording any value whose dotted path matches
    // a configured key field path. currentFieldPath is the path prefix so far.
    private void addObject(JsonParser parser, HollowObjectSchema schema, StringBuilder currentFieldPath) throws IOException {
        JsonToken token = parser.nextToken();

        String fieldName = null;
        try {
            while (token != JsonToken.END_OBJECT) {
                fieldName = parser.getCurrentName();
                addObjectField(parser, token, schema, fieldName, currentFieldPath);
                token = parser.nextToken();
            }
        } catch (Exception ex) {
            throw new IOException("Failed to parse field=" + fieldName + ", schema=" + schema.getName() + ", token=" + token, ex);
        }
    }

    // Handles a single field: skips fields not in the schema, recurses into nested
    // objects, and captures scalar values whose path matches a key path.
    private void addObjectField(JsonParser parser, JsonToken token, HollowObjectSchema schema, String fieldName, StringBuilder currentFieldPath) throws IOException {
        // FIELD_NAME tokens carry no value; only act on value tokens.
        if(token != JsonToken.FIELD_NAME) {
            int fieldPosition = schema.getPosition(fieldName);

            if(fieldPosition == -1) {
                // Field is not declared in the schema: consume and discard it.
                skipObjectField(parser, token);
            } else {
                // Extend the dotted path with this field, restoring it before returning.
                int parentFieldPathLength = currentFieldPath.length();
                if(parentFieldPathLength > 0)
                    currentFieldPath.append(".");
                currentFieldPath.append(fieldName);
                Integer keyFieldPosition = keyFieldPathPositions.get(currentFieldPath.toString());

                switch(token) {
                case START_ARRAY:
                    // Collections never contribute to a primary key; skip the whole array.
                    skipSubArray(parser);
                    break;
                case START_OBJECT:
                    String referencedType = schema.getReferencedType(fieldName);
                    HollowSchema referencedSchema = hollowSchemas.get(referencedType);

                    // Only OBJECT sub-schemas can contain key fields; skip LIST/SET/MAP.
                    if(referencedSchema.getSchemaType() == SchemaType.OBJECT)
                        addObject(parser, (HollowObjectSchema)referencedSchema, currentFieldPath);
                    else
                        skipObject(parser);
                    break;
                case VALUE_FALSE:
                case VALUE_TRUE:
                case VALUE_NUMBER_INT:
                case VALUE_NUMBER_FLOAT:
                case VALUE_STRING:
                    // Scalar value: record it (coerced to the schema's field type) if
                    // this path is one of the key paths.
                    switch(schema.getFieldType(fieldPosition)) {
                    case BOOLEAN:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = Boolean.valueOf(parser.getBooleanValue());
                        break;
                    case INT:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = Integer.valueOf(parser.getIntValue());
                        break;
                    case LONG:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = Long.valueOf(parser.getLongValue());
                        break;
                    case DOUBLE:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = Double.valueOf(parser.getDoubleValue());
                        break;
                    case FLOAT:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = Float.valueOf(parser.getFloatValue());
                        break;
                    case STRING:
                        if(keyFieldPosition != null)
                            keyElementArray[keyFieldPosition.intValue()] = parser.getValueAsString();
                        break;
                    case REFERENCE:
                        // A scalar JSON value mapped to a REFERENCE field: the key path
                        // addresses the first field of the referenced (wrapper) type.
                        if(keyFieldPosition != null)
                            throw new IllegalStateException("Key elements must not be REFERENCE");
                        HollowObjectSchema subSchema = (HollowObjectSchema) hollowSchemas.get(schema.getReferencedType(fieldPosition));
                        currentFieldPath.append(".").append(subSchema.getFieldName(0));
                        keyFieldPosition = keyFieldPathPositions.get(currentFieldPath.toString());

                        if(keyFieldPosition != null) {
                            switch(subSchema.getFieldType(0)) {
                            case BOOLEAN:
                                // (the inner keyFieldPosition checks below are redundant --
                                // we are already inside a non-null guard)
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = Boolean.valueOf(parser.getBooleanValue());
                                break;
                            case INT:
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = Integer.valueOf(parser.getIntValue());
                                break;
                            case LONG:
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = Long.valueOf(parser.getLongValue());
                                break;
                            case DOUBLE:
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = Double.valueOf(parser.getDoubleValue());
                                break;
                            case FLOAT:
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = Float.valueOf(parser.getFloatValue());
                                break;
                            case STRING:
                                if(keyFieldPosition != null)
                                    keyElementArray[keyFieldPosition.intValue()] = parser.getValueAsString();
                                break;
                            case REFERENCE:
                                throw new IllegalStateException("Key elements must not be REFERENCE");
                            default:
                            }
                        }
                    default:
                    }
                    // deliberate fall-through: all VALUE_* cases exit via VALUE_NULL's break
                case VALUE_NULL:
                    break;
                default:
                }

                // Restore the path prefix for the next sibling field.
                currentFieldPath.setLength(parentFieldPathLength);
            }
        }
    }

    // Consumes an entire object whose contents are irrelevant to the key.
    private void skipObject(JsonParser parser) throws IOException {
        JsonToken token = parser.nextToken();
        try {
            while (token != JsonToken.END_OBJECT) {
                skipObjectField(parser, token);
                token = parser.nextToken();
            }
        } catch (Exception ex) {
            throw new IOException(ex);
        }
    }

    // Consumes an entire array, recursing into any nested objects.
    private void skipSubArray(JsonParser parser) throws IOException {
        JsonToken token = parser.nextToken();

        while(token != JsonToken.END_ARRAY) {
            if(token == JsonToken.START_OBJECT) {
                skipObject(parser);
            } else {
                skipObjectField(parser, token);
            }
            token = parser.nextToken();
        }
    }

    // Skips a single value: recurses into containers; scalar tokens need no action
    // because the parser has already consumed them.
    private void skipObjectField(JsonParser parser, JsonToken token) throws IOException {
        switch(token) {
        case START_ARRAY:
            skipSubArray(parser);
            break;
        case START_OBJECT:
            skipObject(parser);
            break;
        case VALUE_FALSE:
        case VALUE_TRUE:
        case VALUE_NUMBER_INT:
        case VALUE_NUMBER_FLOAT:
        case VALUE_STRING:
        case VALUE_NULL:
        default:
        }
    }
}
| 9,424 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/ObjectMappedFieldPath.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.jsonadapter.field.FieldProcessor;
/**
 * Binds one incoming (unmapped) JSON field to its destination: the hollow write
 * record and field position it populates, plus an optional custom
 * {@link FieldProcessor} that overrides the default value handling.
 */
public class ObjectMappedFieldPath {

    // Destination write record for this field.
    private final HollowObjectWriteRecord rec;
    // Destination field name and its position within the record's schema.
    private final String fieldName;
    private final int fieldPosition;
    // Original (pre-remapping) type/field names from the source JSON.
    private final String unmappedTypeName;
    private final String unmappedFieldName;
    // Optional custom handler; mutable so processors can be attached after construction.
    private FieldProcessor fieldProcessor;

    public ObjectMappedFieldPath(HollowObjectWriteRecord rec, String fieldName, String unmappedTypeName, String unmappedFieldName, int fieldPosition, FieldProcessor fieldProcessor) {
        this.rec = rec;
        this.fieldName = fieldName;
        this.fieldPosition = fieldPosition;
        this.unmappedTypeName = unmappedTypeName;
        this.unmappedFieldName = unmappedFieldName;
        this.fieldProcessor = fieldProcessor;
    }

    /** The write record this field populates. */
    public HollowObjectWriteRecord getWriteRecord() {
        return rec;
    }

    /** Destination type name, derived from the write record's schema. */
    public String getTypeName() {
        return rec.getSchema().getName();
    }

    /** Destination field name within the write record. */
    public String getFieldName() {
        return fieldName;
    }

    /** Destination field's position in the schema. */
    public int getFieldPosition() {
        return fieldPosition;
    }

    /** Source type name before any remapping. */
    public String getUnmappedTypeName() {
        return unmappedTypeName;
    }

    /** Source field name before any remapping. */
    public String getUnmappedFieldName() {
        return unmappedFieldName;
    }

    /** The custom processor for this field, or null when default handling applies. */
    public FieldProcessor getFieldProcessor() {
        return fieldProcessor;
    }

    public void setFieldProcessor(FieldProcessor fieldProcessor) {
        this.fieldProcessor = fieldProcessor;
    }
}
| 9,425 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/HollowJsonToFlatRecordTask.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.fasterxml.jackson.core.JsonParser;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import com.netflix.hollow.core.write.objectmapper.flatrecords.HollowSchemaIdentifierMapper;
import java.io.IOException;
import java.io.Reader;
import java.util.function.Consumer;
/**
 * Streams a JSON array and converts each element into a {@link FlatRecord},
 * handing every produced record to the supplied consumer. Records may be
 * processed concurrently, so each worker thread uses its own
 * {@link FlatRecordWriter}.
 */
public class HollowJsonToFlatRecordTask extends AbstractHollowJsonAdaptorTask {

    private final HollowJsonAdapter adapter;
    private final HollowSchemaIdentifierMapper schemaIdMapper;
    private final Consumer<FlatRecord> action;

    // One writer per worker thread, created lazily on first use by that thread.
    private final ThreadLocal<FlatRecordWriter> flatRecordWriter;

    public HollowJsonToFlatRecordTask(HollowJsonAdapter adapter,
                                      HollowSchemaIdentifierMapper schemaIdMapper,
                                      Consumer<FlatRecord> action) {
        super(adapter.getTypeName());
        this.adapter = adapter;
        this.schemaIdMapper = schemaIdMapper;
        this.action = action;
        this.flatRecordWriter = ThreadLocal.withInitial(
                () -> new FlatRecordWriter(adapter.stateEngine, schemaIdMapper));
    }

    /**
     * Processes every record in the given JSON array stream.
     */
    public void process(Reader jsonReader) throws Exception {
        processFile(jsonReader, Integer.MAX_VALUE);
    }

    /**
     * Converts a single JSON record to a FlatRecord, delivers it to the consumer,
     * and returns the record's ordinal as reported by the adapter.
     */
    @Override
    protected int processRecord(JsonParser parser) throws IOException {
        FlatRecordWriter recWriter = getFlatRecordWriter();
        int ordinal = adapter.processRecord(parser, recWriter);
        FlatRecord rec = recWriter.generateFlatRecord();
        action.accept(rec);
        return ordinal;
    }

    /** Returns this thread's writer, reset and ready for the next record. */
    private FlatRecordWriter getFlatRecordWriter() {
        FlatRecordWriter writer = flatRecordWriter.get();
        writer.reset();
        return writer;
    }
}
| 9,426 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/ObjectFieldMapping.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import com.netflix.hollow.jsonadapter.field.FieldProcessor;
import java.util.HashMap;
import java.util.Map;
public class ObjectFieldMapping {
private final HollowWriteStateEngine stateEngine;
private final String typeName;
private final HollowJsonAdapter populator;
private final Map<String, ObjectMappedFieldPath> mappedFieldPaths;
private final RemappingBuilderInstruction rootInstruction;
private final Map<String, HollowObjectWriteRecord> writeRecords;
public ObjectFieldMapping(String typeName, HollowJsonAdapter populator) {
this.stateEngine = populator.stateEngine;
this.typeName = typeName;
this.populator = populator;
HollowObjectTypeWriteState typeState = (HollowObjectTypeWriteState) stateEngine.getTypeState(typeName);
HollowObjectWriteRecord writeRec = new HollowObjectWriteRecord(typeState.getSchema());
this.rootInstruction = new RemappingBuilderInstruction(writeRec, typeState);
this.mappedFieldPaths = new HashMap<String, ObjectMappedFieldPath>();
this.writeRecords = new HashMap<String, HollowObjectWriteRecord>();
mapAllPaths((HollowObjectSchema)stateEngine.getSchema(typeName));
}
private ObjectFieldMapping(String typeName, HollowJsonAdapter populator, Map<String, ObjectMappedFieldPath> mappedFieldPaths, RemappingBuilderInstruction rootInstruction, Map<String, HollowObjectWriteRecord> writeRecords) {
this.stateEngine = populator.stateEngine;
this.typeName = typeName;
this.populator = populator;
this.rootInstruction = rootInstruction;
this.mappedFieldPaths = mappedFieldPaths;
this.writeRecords = writeRecords;
}
public int build(int passthroughOrdinal, FlatRecordWriter flatRecordWriter) {
int ordinal = rootInstruction.executeInstruction(passthroughOrdinal, flatRecordWriter);
for(Map.Entry<String, HollowObjectWriteRecord> entry : writeRecords.entrySet())
entry.getValue().reset();
return ordinal;
}
private void mapAllPaths(HollowObjectSchema schema) {
for(int i=0;i<schema.numFields();i++) {
if(!mappedFieldPaths.containsKey(schema.getFieldName(i))) {
HollowObjectWriteRecord rec = getWriteRecord(schema);
mappedFieldPaths.put(schema.getFieldName(i), new ObjectMappedFieldPath(rec, schema.getFieldName(i), schema.getName(), schema.getFieldName(i), i, populator.getFieldProcessor(schema.getName(), schema.getFieldName(i))));
}
}
}
public void addRemappedPath(String fromFieldName, String... fieldPaths) {
ObjectMappedFieldPath pathMapping = addPathMapping(fromFieldName, fieldPaths, rootInstruction, 0);
mappedFieldPaths.put(fromFieldName, pathMapping);
}
public ObjectMappedFieldPath getSingleFieldMapping() {
return mappedFieldPaths.entrySet().iterator().next().getValue();
}
public ObjectMappedFieldPath getMappedFieldPath(String fieldName) {
return mappedFieldPaths.get(fieldName);
}
public void addFieldProcessor(FieldProcessor fieldProcessor) {
for(Map.Entry<String, ObjectMappedFieldPath> entry : mappedFieldPaths.entrySet()) {
if(fieldProcessor.getEntityName().equals(entry.getValue().getTypeName())
&& fieldProcessor.getFieldName().equals(entry.getValue().getFieldName())) {
entry.getValue().setFieldProcessor(fieldProcessor);
return;
}
if(fieldProcessor.getEntityName().equals(entry.getValue().getUnmappedTypeName())
&& fieldProcessor.getFieldName().equals(entry.getValue().getUnmappedFieldName())) {
entry.getValue().setFieldProcessor(fieldProcessor);
return;
}
}
}
private ObjectMappedFieldPath addPathMapping(String fieldName, String[] fieldPaths, RemappingBuilderInstruction instruction, int idx) {
if(idx < fieldPaths.length - 1) {
RemappingBuilderInstruction childInstruction = instruction.childrenRecs.get(fieldPaths[idx]);
HollowObjectSchema schema = instruction.typeState.getSchema();
String referencedType = schema.getReferencedType(fieldPaths[idx]);
if(childInstruction == null) {
HollowObjectTypeWriteState childTypeState = (HollowObjectTypeWriteState) stateEngine.getTypeState(referencedType);
HollowObjectWriteRecord childWriteRec = getWriteRecord(childTypeState.getSchema());
childInstruction = new RemappingBuilderInstruction(childWriteRec, childTypeState);
instruction.addChildInstruction(fieldPaths[idx], childInstruction);
}
return addPathMapping(fieldName, fieldPaths, childInstruction, idx+1);
}
HollowObjectSchema schema = instruction.rec.getSchema();
String remappedFieldName = fieldPaths[idx];
return new ObjectMappedFieldPath(instruction.rec, remappedFieldName, typeName, fieldName, schema.getPosition(remappedFieldName), findFieldProcessor(typeName, fieldName, schema.getName(), remappedFieldName));
}
private FieldProcessor findFieldProcessor(String typeName, String fieldName, String mappedTypeName, String mappedFieldName) {
FieldProcessor fp = populator.getFieldProcessor(typeName, fieldName);
if(fp != null)
return fp;
return populator.getFieldProcessor(mappedTypeName, mappedFieldName);
}
private HollowObjectWriteRecord getWriteRecord(HollowObjectSchema schema) {
HollowObjectWriteRecord writeRecord = writeRecords.get(schema.getName());
if(writeRecord == null) {
writeRecord = new HollowObjectWriteRecord(schema);
writeRecords.put(schema.getName(), writeRecord);
}
return writeRecord;
}
private class RemappingBuilderInstruction {
private final HollowObjectWriteRecord rec;
private final HollowObjectTypeWriteState typeState;
private final Map<String, RemappingBuilderInstruction> childrenRecs;
public RemappingBuilderInstruction(HollowObjectWriteRecord rec, HollowObjectTypeWriteState typeState) {
this(rec, typeState, new HashMap<String, RemappingBuilderInstruction>());
}
private RemappingBuilderInstruction(HollowObjectWriteRecord rec, HollowObjectTypeWriteState typeState, Map<String, RemappingBuilderInstruction> childrenRecs) {
this.rec = rec;
this.typeState = typeState;
this.childrenRecs = childrenRecs;
}
public void addChildInstruction(String fieldName, RemappingBuilderInstruction instruction) {
childrenRecs.put(fieldName, instruction);
}
public int executeInstruction(int passthroughOrdinal, FlatRecordWriter flatRecordWriter) {
for(Map.Entry<String, RemappingBuilderInstruction> childEntry : childrenRecs.entrySet()) {
int childOrdinal = childEntry.getValue().executeInstruction(-1, flatRecordWriter);
rec.setReference(childEntry.getKey(), childOrdinal);
}
if(passthroughOrdinal != -1)
rec.setReference("passthrough", passthroughOrdinal);
if(flatRecordWriter != null)
return flatRecordWriter.write(typeState.getSchema(), rec);
return typeState.add(rec);
}
public RemappingBuilderInstruction clone(Map<String, HollowObjectWriteRecord> clonedWriteRecords) {
Map<String, RemappingBuilderInstruction> childClones = new HashMap<String, ObjectFieldMapping.RemappingBuilderInstruction>();
for(Map.Entry<String, RemappingBuilderInstruction> childEntry : childrenRecs.entrySet())
childClones.put(childEntry.getKey(), childEntry.getValue().clone(clonedWriteRecords));
HollowObjectWriteRecord clonedRec = clonedWriteRecords.get(rec.getSchema().getName());
return new RemappingBuilderInstruction(clonedRec, typeState, childClones);
}
}
@Override
public ObjectFieldMapping clone() {
Map<String, HollowObjectWriteRecord> clonedWriteRecords = new HashMap<String, HollowObjectWriteRecord>();
for(Map.Entry<String, HollowObjectWriteRecord> recEntry : writeRecords.entrySet()) {
clonedWriteRecords.put(recEntry.getKey(), new HollowObjectWriteRecord(recEntry.getValue().getSchema()));
}
Map<String, ObjectMappedFieldPath> clonedMappedFieldPaths = new HashMap<String, ObjectMappedFieldPath>();
for(Map.Entry<String, ObjectMappedFieldPath> fieldEntry : mappedFieldPaths.entrySet()) {
ObjectMappedFieldPath original = fieldEntry.getValue();
HollowObjectWriteRecord clonedWriteRecord = clonedWriteRecords.get(original.getWriteRecord().getSchema().getName());
clonedMappedFieldPaths.put(fieldEntry.getKey(), new ObjectMappedFieldPath(clonedWriteRecord, original.getFieldName(), original.getUnmappedTypeName(), original.getUnmappedFieldName(), original.getFieldPosition(), original.getFieldProcessor()));
}
return new ObjectFieldMapping(typeName, populator, clonedMappedFieldPaths, rootInstruction.clone(clonedWriteRecords), clonedWriteRecords);
}
}
| 9,427 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/HollowJsonAdapter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowListWriteRecord;
import com.netflix.hollow.core.write.HollowMapWriteRecord;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowSetWriteRecord;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import com.netflix.hollow.jsonadapter.field.FieldProcessor;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Populate a HollowWriteStateEngine based on data encoded in JSON.
*/
public class HollowJsonAdapter extends AbstractHollowJsonAdaptorTask {
final HollowWriteStateEngine stateEngine;
private final Map<String, HollowSchema> hollowSchemas;
private final ThreadLocal<Map<String, HollowWriteRecord>> hollowWriteRecordsHolder = new ThreadLocal<Map<String, HollowWriteRecord>>();
private final ThreadLocal<Map<String, ObjectFieldMapping>> objectFieldMappingHolder = new ThreadLocal<Map<String, ObjectFieldMapping>>();
private final Map<String, ObjectFieldMapping> canonicalObjectFieldMappings;
private final Set<String> passthroughDecoratedTypes;
private final ThreadLocal<PassthroughWriteRecords> passthroughRecords;
/// TODO: Would be nice to be able to take a HollowDataset here, if only producing FlatRecords,
/// instead of requiring a HollowWriteStateEngine
/**
 * Creates an adapter that populates the given write state engine with records of
 * the given root type. A canonical field mapping is prepared for every object
 * schema so per-thread copies can be cloned from it later.
 */
public HollowJsonAdapter(HollowWriteStateEngine stateEngine, String typeName) {
    super(typeName, "populate");
    this.stateEngine = stateEngine;
    this.hollowSchemas = new HashMap<>();
    this.canonicalObjectFieldMappings = new HashMap<>();
    this.passthroughDecoratedTypes = new HashSet<>();
    // TODO: Special 'passthrough' processing.
    this.passthroughRecords = new ThreadLocal<>();

    // Index every schema by name; object schemas additionally get a field mapping.
    for (HollowSchema schema : stateEngine.getSchemas()) {
        hollowSchemas.put(schema.getName(), schema);
        if (schema instanceof HollowObjectSchema) {
            canonicalObjectFieldMappings.put(schema.getName(), new ObjectFieldMapping(schema.getName(), this));
        }
    }
}
/**
 * Registers custom field processors. A processor whose entity name matches a known
 * object type is attached to that type's mapping only; otherwise it is offered to
 * every mapping (so it can match remapped destination fields).
 */
@Override
public void addFieldProcessor(FieldProcessor... processors) {
    super.addFieldProcessor(processors);
    for (FieldProcessor processor : processors) {
        ObjectFieldMapping mapping = canonicalObjectFieldMappings.get(processor.getEntityName());
        if (mapping != null) {
            mapping.addFieldProcessor(processor);
        } else {
            for (ObjectFieldMapping candidate : canonicalObjectFieldMappings.values()) {
                candidate.addFieldProcessor(processor);
            }
        }
    }
}
/**
 * Redirects a field in the incoming JSON for the given type to a different
 * (possibly nested) destination path within the hollow schema.
 */
public void remapFieldPath(String type, String fieldName, String... fieldPaths) {
    ObjectFieldMapping mapping = canonicalObjectFieldMappings.get(type);
    mapping.addRemappedPath(fieldName, fieldPaths);
}

////TODO: Refactor upstream json data to not require special 'passthrough' processing.
/** Marks a type whose unrecognized JSON fields are retained as passthrough data. */
public void addPassthroughDecoratedType(String type) {
    passthroughDecoratedTypes.add(type);
}

/** Populates the state engine from a file containing a JSON array of records. */
public void populate(File jsonFile) throws Exception {
    processFile(jsonFile, Integer.MAX_VALUE);
}

/** Populates the state engine from a reader over a JSON array of records. */
public void populate(Reader jsonReader) throws Exception {
    processFile(jsonReader, Integer.MAX_VALUE);
}

/** Adds a single JSON record to the state engine and returns its ordinal. */
public int processRecord(String singleRecord) throws IOException {
    return processRecord(singleRecord, null);
}

/**
 * Parses a single JSON record. When a FlatRecordWriter is supplied, the record is
 * emitted through it instead of being added to the write state engine.
 */
public int processRecord(String singleRecord, FlatRecordWriter flatRecordWriter) throws IOException {
    JsonParser parser = new JsonFactory().createParser(new StringReader(singleRecord));
    return processRecord(parser, flatRecordWriter);
}
/** Processes one record without flat-record output (adds to the write state engine). */
@Override
protected int processRecord(JsonParser parser) throws IOException {
    return processRecord(parser, null);
}

/**
 * Processes one record positioned at the start of the parser; returns its ordinal.
 * A non-null FlatRecordWriter diverts output into flat records.
 */
protected int processRecord(JsonParser parser, FlatRecordWriter flatRecordWriter) throws IOException {
    initHollowWriteRecordsIfNecessary();
    JsonToken firstToken = parser.nextToken();
    return parseSubType(parser, flatRecordWriter, firstToken, typeName);
}
/**
 * Dispatches parsing of a value to the handler matching the hollow schema type of
 * {@code subType}, validating that the current JSON token is compatible.
 *
 * @return the ordinal of the record added for this value
 * @throws IOException if the token does not match the schema type, or the schema
 *                     type is unrecognized
 */
private int parseSubType(JsonParser parser, FlatRecordWriter flatRecordWriter, JsonToken currentToken, String subType) throws IOException {
    HollowSchema subTypeSchema = hollowSchemas.get(subType);

    switch(subTypeSchema.getSchemaType()) {
    case OBJECT:
        if(currentToken != JsonToken.START_OBJECT)
            throw new IOException("Expecting to parse a " + subType + ", which is a " + subTypeSchema.getSchemaType() + ", expected JsonToken.START_OBJECT but instead found a " + currentToken.toString());
        return addObject(parser, flatRecordWriter, subType);
    case LIST:
    case SET:
        if(currentToken != JsonToken.START_ARRAY)
            throw new IOException("Expecting to parse a " + subType + ", which is a " + subTypeSchema.getSchemaType() + ", expected JsonToken.START_ARRAY but instead found a " + currentToken.toString());
        return addSubArray(parser, flatRecordWriter, subType, getWriteRecord(subType));
    case MAP:
        // Maps are encoded either as an array of entry objects (structured) or as a
        // plain JSON object (unstructured).
        switch(currentToken) {
        case START_ARRAY:
            return addStructuredMap(parser, flatRecordWriter, subType, (HollowMapWriteRecord) getWriteRecord(subType));
        case START_OBJECT:
            return addUnstructuredMap(parser, flatRecordWriter, subType, (HollowMapWriteRecord) getWriteRecord(subType));
        default:
            throw new IOException("Expecting to parse a " + subType + ", which is a " + subTypeSchema.getSchemaType() + ", expected JsonToken.START_ARRAY or JsonToken.START_OBJECT but instead found a " + currentToken.toString());
        }
    }

    // Previously thrown with no message, which made failures here undiagnosable.
    throw new IOException("Unhandled schema type " + subTypeSchema.getSchemaType() + " for type " + subType);
}
// Parses one JSON object of the given type: mapped fields are written through the
// per-thread ObjectFieldMapping; unmapped fields are either captured as passthrough
// data (if the type is passthrough-decorated) or skipped. Returns the ordinal of
// the built record.
private int addObject(JsonParser parser, FlatRecordWriter flatRecordWriter, String typeName) throws IOException {
    ObjectFieldMapping objectMapping = getObjectFieldMapping(typeName);
    // Lazily computed on the first unmapped field; also serves as an
    // "already checked" flag (null = not yet checked).
    // NOTE: this local shadows the instance field of the same name.
    Boolean passthroughDecoratedTypes = null;
    JsonToken token = parser.nextToken();
    PassthroughWriteRecords rec = null;

    String fieldName = null;
    try {
        while (token != JsonToken.END_OBJECT) {
            // FIELD_NAME tokens carry no value; only act on value tokens.
            if(token != JsonToken.FIELD_NAME) {
                fieldName = parser.getCurrentName();
                ObjectMappedFieldPath mappedFieldPath = objectMapping.getMappedFieldPath(fieldName);
                if(mappedFieldPath != null) {
                    addObjectField(parser, flatRecordWriter, token, mappedFieldPath);
                } else {
                    if(passthroughDecoratedTypes == null) {
                        passthroughDecoratedTypes = Boolean.valueOf(this.passthroughDecoratedTypes.contains(typeName));
                        if(passthroughDecoratedTypes.booleanValue()) {
                            rec = getPassthroughWriteRecords();
                        }
                    }
                    if(passthroughDecoratedTypes.booleanValue()) {
                        addPassthroughField(parser, flatRecordWriter, token, fieldName, rec);
                    } else {
                        skipObjectField(parser, token);
                    }
                }
            }
            token = parser.nextToken();
        }
    } catch (Exception ex) {
        throw new IOException("Failed to parse field=" + fieldName + ", schema=" + typeName + ", token=" + token, ex);
    }

    if(passthroughDecoratedTypes != null && passthroughDecoratedTypes.booleanValue()) {
        // Flush the collected passthrough maps and attach them to the record.
        rec.passthroughRec.setReference("singleValues", addRecord("SingleValuePassthroughMap", rec.singleValuePassthroughMapRec, flatRecordWriter));
        rec.passthroughRec.setReference("multiValues", addRecord("MultiValuePassthroughMap", rec.multiValuePassthroughMapRec, flatRecordWriter));
        int passthroughOrdinal = addRecord("PassthroughData", rec.passthroughRec, flatRecordWriter);
        return objectMapping.build(passthroughOrdinal, flatRecordWriter);
    }

    return objectMapping.build(-1, flatRecordWriter);
}
// Captures an unmapped JSON field as passthrough data, keyed by the raw field name.
// Scalar values go into the single-value map; arrays of scalars become a list of
// strings in the multi-value map. Nulls are dropped and nested objects are skipped.
private void addPassthroughField(JsonParser parser, FlatRecordWriter flatRecordWriter, JsonToken token, String fieldName, PassthroughWriteRecords rec) throws IOException {
    rec.passthroughMapKeyWriteRecord.reset();
    rec.passthroughMapKeyWriteRecord.setString("value", fieldName);
    int keyOrdinal = addRecord("MapKey", rec.passthroughMapKeyWriteRecord, flatRecordWriter);

    switch(token) {
    case START_ARRAY:
        rec.multiValuePassthroughListRec.reset();
        // The first iteration sees START_ARRAY itself, hits the default no-op,
        // then advances; scalar elements are stringified and collected.
        while(token != JsonToken.END_ARRAY) {
            switch(token) {
            case VALUE_FALSE:
            case VALUE_TRUE:
            case VALUE_NUMBER_INT:
            case VALUE_NUMBER_FLOAT:
            case VALUE_STRING:
                rec.passthroughMapValueWriteRecord.reset();
                rec.passthroughMapValueWriteRecord.setString("value", parser.getValueAsString());
                int elementOrdinal = addRecord("String", rec.passthroughMapValueWriteRecord, flatRecordWriter);
                rec.multiValuePassthroughListRec.addElement(elementOrdinal);
                break;
            default:
                break;
            }
            token = parser.nextToken();
        }
        int valueListOrdinal = addRecord("ListOfString", rec.multiValuePassthroughListRec, flatRecordWriter);
        rec.multiValuePassthroughMapRec.addEntry(keyOrdinal, valueListOrdinal);
        break;
    case VALUE_FALSE:
    case VALUE_TRUE:
    case VALUE_NUMBER_INT:
    case VALUE_NUMBER_FLOAT:
    case VALUE_STRING:
        // Scalar value: stored as its string representation.
        rec.passthroughMapValueWriteRecord.reset();
        rec.passthroughMapValueWriteRecord.setString("value", parser.getValueAsString());
        int valueOrdinal = addRecord("String", rec.passthroughMapValueWriteRecord, flatRecordWriter);
        rec.singleValuePassthroughMapRec.addEntry(keyOrdinal, valueOrdinal);
        break;
    case VALUE_NULL:
        // Null passthrough values are dropped (the key record was still written).
        break;
    case START_OBJECT:
        // Nested objects are not representable as passthrough data; skip entirely.
        skipObject(parser);
        break;
    default:
        break;
    }
}
/**
 * Writes a single parsed JSON value into the Hollow field described by {@code mappedFieldPath}.
 * <p>
 * Nested objects/arrays are recursed into via {@code parseSubType}; scalar values are written
 * directly, or wrapped in a single-field referenced record when the schema models the field as
 * a REFERENCE. Unmapped fields are consumed and discarded so the parser stays positioned
 * correctly for the next token.
 */
private void addObjectField(JsonParser parser, FlatRecordWriter flatRecordWriter, JsonToken token, ObjectMappedFieldPath mappedFieldPath) throws IOException {
    if (mappedFieldPath == null) {
        // No schema mapping for this field: skip its value (including nested structures).
        skipObjectField(parser, token);
        return;
    }
    HollowObjectWriteRecord writeRec = mappedFieldPath.getWriteRecord();
    HollowObjectSchema schema = writeRec.getSchema();
    String fieldName = mappedFieldPath.getFieldName();
    int fieldPosition = mappedFieldPath.getFieldPosition();
    // A registered field processor takes over handling of non-null values entirely.
    FieldProcessor processor = mappedFieldPath.getFieldProcessor();
    if (processor != null && token != JsonToken.VALUE_NULL) {
        processor.processField(parser, stateEngine, writeRec);
        return;
    }
    switch (token) {
        case START_ARRAY:
        case START_OBJECT:
            int refOrdinal = parseSubType(parser, flatRecordWriter, token, schema.getReferencedType(fieldPosition));
            writeRec.setReference(fieldName, refOrdinal);
            break;
        case VALUE_FALSE:
        case VALUE_TRUE:
        case VALUE_NUMBER_INT:
        case VALUE_NUMBER_FLOAT:
        case VALUE_STRING:
            switch (schema.getFieldType(fieldPosition)) {
                case BOOLEAN:
                    writeRec.setBoolean(fieldName, parser.getBooleanValue());
                    break;
                case INT:
                    writeRec.setInt(fieldName, parser.getIntValue());
                    break;
                case LONG:
                    writeRec.setLong(fieldName, parser.getLongValue());
                    break;
                case DOUBLE:
                    writeRec.setDouble(fieldName, parser.getDoubleValue());
                    break;
                case FLOAT:
                    writeRec.setFloat(fieldName, parser.getFloatValue());
                    break;
                case STRING:
                    writeRec.setString(fieldName, parser.getValueAsString());
                    break;
                case REFERENCE:
                    // Scalar value backing a REFERENCE field: wrap it in the referenced
                    // single-field record type and point the parent at that record.
                    HollowObjectWriteRecord referencedRec = (HollowObjectWriteRecord) getWriteRecord(schema.getReferencedType(fieldPosition));
                    referencedRec.reset();
                    String refFieldName = referencedRec.getSchema().getFieldName(0);
                    switch (referencedRec.getSchema().getFieldType(0)) {
                        case BOOLEAN:
                            referencedRec.setBoolean(refFieldName, parser.getBooleanValue());
                            break;
                        case INT:
                            referencedRec.setInt(refFieldName, parser.getIntValue());
                            break;
                        case LONG:
                            referencedRec.setLong(refFieldName, parser.getLongValue());
                            break;
                        case DOUBLE:
                            referencedRec.setDouble(refFieldName, parser.getDoubleValue());
                            break;
                        case FLOAT:
                            referencedRec.setFloat(refFieldName, parser.getFloatValue());
                            break;
                        case STRING:
                            referencedRec.setString(refFieldName, parser.getValueAsString());
                            break;
                        default:
                            break;
                    }
                    int referencedOrdinal = addRecord(schema.getReferencedType(fieldPosition), referencedRec, flatRecordWriter);
                    writeRec.setReference(fieldName, referencedOrdinal);
                    break;
                default:
                    break;
            }
            // Fixed accidental fall-through: this case previously dropped into VALUE_NULL
            // (whose break made it a no-op, so behavior is unchanged — intent is now explicit).
            break;
        case VALUE_NULL:
            // Null leaves the field unset on the write record.
            break;
        default:
            break;
    }
}
/**
 * Parses a JSON array into a Hollow LIST or SET record of type {@code arrayType} and returns
 * the ordinal of the added record. Object/array elements recurse via parseSubType; scalar
 * elements are wrapped in the element type's single-field record.
 */
private int addSubArray(JsonParser parser, FlatRecordWriter flatRecordWriter, String arrayType, HollowWriteRecord arrayRec) throws IOException {
JsonToken token = parser.nextToken();
arrayRec.reset();
HollowCollectionSchema schema = (HollowCollectionSchema) hollowSchemas.get(arrayType);
// Lazily resolved on the first scalar element; reused for all subsequent scalars.
ObjectFieldMapping valueRec = null;
ObjectMappedFieldPath fieldMapping = null;
while(token != JsonToken.END_ARRAY) {
int elementOrdinal;
if(token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
elementOrdinal = parseSubType(parser, flatRecordWriter, token, schema.getElementType());
} else {
if(valueRec == null) {
valueRec = getObjectFieldMapping(schema.getElementType());
fieldMapping = valueRec.getSingleFieldMapping();
}
addObjectField(parser, flatRecordWriter, token, fieldMapping);
elementOrdinal = valueRec.build(-1, flatRecordWriter);
}
// The backing record determines collection semantics (LIST preserves duplicates/order).
if(arrayRec instanceof HollowListWriteRecord) {
((HollowListWriteRecord) arrayRec).addElement(elementOrdinal);
} else {
((HollowSetWriteRecord)arrayRec).addElement(elementOrdinal);
}
token = parser.nextToken();
}
return addRecord(arrayType, arrayRec, flatRecordWriter);
}
/**
 * Parses a JSON array of {"key": ..., "value": ...} objects into a Hollow MAP record and
 * returns the ordinal of the added record. Entries whose key or value is absent are written
 * with ordinal -1.
 */
private int addStructuredMap(JsonParser parser, FlatRecordWriter flatRecordWriter, String mapTypeName, HollowMapWriteRecord mapRec) throws IOException {
JsonToken token = parser.nextToken();
mapRec.reset();
HollowMapSchema schema = (HollowMapSchema) hollowSchemas.get(mapTypeName);
while(token != JsonToken.END_ARRAY) {
if(token == JsonToken.START_OBJECT) {
int keyOrdinal = -1, valueOrdinal = -1;
while(token != JsonToken.END_OBJECT) {
// Only container-valued "key"/"value" fields are recognized here; the current
// field name tells us which side of the entry is being parsed.
if(token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
if("key".equals(parser.getCurrentName()))
keyOrdinal = parseSubType(parser, flatRecordWriter, token, schema.getKeyType());
else if("value".equals(parser.getCurrentName()))
valueOrdinal = parseSubType(parser, flatRecordWriter, token, schema.getValueType());
}
token = parser.nextToken();
}
mapRec.addEntry(keyOrdinal, valueOrdinal);
}
token = parser.nextToken();
}
return addRecord(schema.getName(), mapRec, flatRecordWriter);
}
/**
 * Parses a JSON object whose field names are map keys into a Hollow MAP record and returns
 * the ordinal of the added record. The key's scalar type is dictated by the map schema's key
 * record; values may be scalars (wrapped in the value type's single-field record) or nested
 * structures.
 *
 * @throws IOException if the key schema's field type cannot be parsed from a JSON field name
 */
private int addUnstructuredMap(JsonParser parser, FlatRecordWriter flatRecordWriter, String mapTypeName, HollowMapWriteRecord mapRec) throws IOException {
mapRec.reset();
HollowMapSchema schema = (HollowMapSchema) hollowSchemas.get(mapTypeName);
// Lazily resolved on the first scalar value; reused for all subsequent scalars.
ObjectFieldMapping valueRec = null;
ObjectMappedFieldPath fieldMapping = null;
JsonToken token = parser.nextToken();
while(token != JsonToken.END_OBJECT) {
// Value tokens are processed here; getCurrentName() still reports the field (map key)
// the current value belongs to.
if(token != JsonToken.FIELD_NAME) {
HollowObjectWriteRecord mapKeyWriteRecord = (HollowObjectWriteRecord) getWriteRecord(schema.getKeyType());
String fieldName = mapKeyWriteRecord.getSchema().getFieldName(0);
mapKeyWriteRecord.reset();
switch(mapKeyWriteRecord.getSchema().getFieldType(0)) {
case STRING:
mapKeyWriteRecord.setString(fieldName, parser.getCurrentName());
break;
case BOOLEAN:
mapKeyWriteRecord.setBoolean(fieldName, Boolean.valueOf(parser.getCurrentName()));
break;
case INT:
mapKeyWriteRecord.setInt(fieldName, Integer.parseInt(parser.getCurrentName()));
break;
case LONG:
mapKeyWriteRecord.setLong(fieldName, Long.parseLong(parser.getCurrentName()));
break;
case DOUBLE:
mapKeyWriteRecord.setDouble(fieldName, Double.parseDouble(parser.getCurrentName()));
break;
case FLOAT:
mapKeyWriteRecord.setFloat(fieldName, Float.parseFloat(parser.getCurrentName()));
break;
default:
throw new IOException("Cannot parse type " + mapKeyWriteRecord.getSchema().getFieldType(0) + " as key in map (" + mapKeyWriteRecord.getSchema().getName() + ")");
}
int keyOrdinal = addRecord(schema.getKeyType(), mapKeyWriteRecord, flatRecordWriter);
int valueOrdinal;
if(token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
valueOrdinal = parseSubType(parser, flatRecordWriter, token, schema.getValueType());
} else {
if(valueRec == null) {
valueRec = getObjectFieldMapping(schema.getValueType());
fieldMapping = valueRec.getSingleFieldMapping();
}
addObjectField(parser, flatRecordWriter, token, fieldMapping);
valueOrdinal = valueRec.build(-1, flatRecordWriter);
}
mapRec.addEntry(keyOrdinal, valueOrdinal);
}
token = parser.nextToken();
}
return addRecord(schema.getName(), mapRec, flatRecordWriter);
}
/**
 * Consumes and discards every field of the JSON object the parser has just entered,
 * leaving the parser positioned on the object's END_OBJECT token.
 */
private void skipObject(JsonParser parser) throws IOException {
    JsonToken current = parser.nextToken();
    try {
        for (; current != JsonToken.END_OBJECT; current = parser.nextToken()) {
            skipObjectField(parser, current);
        }
    } catch (Exception ex) {
        // Surface any parse failure uniformly as an IOException.
        throw new IOException(ex);
    }
}
/**
 * Consumes and discards every element of the JSON array the parser has just entered,
 * leaving the parser positioned on the array's END_ARRAY token.
 */
private void skipSubArray(JsonParser parser) throws IOException {
    for (JsonToken current = parser.nextToken(); current != JsonToken.END_ARRAY; current = parser.nextToken()) {
        if (current == JsonToken.START_OBJECT) {
            skipObject(parser);
        } else {
            skipObjectField(parser, current);
        }
    }
}
/**
 * Discards a single field value. Only container values need active skipping —
 * scalar and null tokens have already been consumed by the caller's parser advance.
 */
private void skipObjectField(JsonParser parser, JsonToken token) throws IOException {
    if (token == JsonToken.START_ARRAY) {
        skipSubArray(parser);
    } else if (token == JsonToken.START_OBJECT) {
        skipObject(parser);
    }
    // All scalar tokens (booleans, numbers, strings, null) require no action.
}
/**
 * Adds a populated write record and returns its ordinal. When a flat-record writer is
 * supplied the record goes to it; otherwise it is added directly to the state engine.
 */
private int addRecord(String type, HollowWriteRecord rec, FlatRecordWriter flatRecordWriter) {
    if (flatRecordWriter == null) {
        return stateEngine.add(type, rec);
    }
    return flatRecordWriter.write(stateEngine.getSchema(type), rec);
}
/**
 * Lazily initializes the per-adapter write records and field mappings using double-checked
 * locking, with {@code hollowWriteRecordsHolder} acting as the initialization guard.
 */
private void initHollowWriteRecordsIfNecessary() {
    if (hollowWriteRecordsHolder.get() == null) {
        synchronized (this) {
            if (hollowWriteRecordsHolder.get() == null) {
                Map<String, HollowWriteRecord> lookupMap = createWriteRecords(stateEngine);
                // Fix: publish the field mappings BEFORE the write records. Readers use
                // hollowWriteRecordsHolder as the guard, so previously a racing reader could
                // observe non-null write records while objectFieldMappingHolder was still null.
                objectFieldMappingHolder.set(cloneFieldMappings());
                hollowWriteRecordsHolder.set(lookupMap);
            }
        }
    }
}
/**
 * Builds one reusable write record per schema in the state engine, keyed by type name.
 */
private static Map<String, HollowWriteRecord> createWriteRecords(HollowWriteStateEngine stateEngine) {
    Map<String, HollowWriteRecord> recordsByType = new HashMap<>();
    for (HollowSchema schema : stateEngine.getSchemas()) {
        HollowWriteRecord record;
        switch (schema.getSchemaType()) {
            case LIST:
                record = new HollowListWriteRecord();
                break;
            case MAP:
                record = new HollowMapWriteRecord();
                break;
            case OBJECT:
                record = new HollowObjectWriteRecord((HollowObjectSchema) schema);
                break;
            case SET:
                record = new HollowSetWriteRecord();
                break;
            default:
                record = null;
                break;
        }
        if (record != null) {
            recordsByType.put(schema.getName(), record);
        }
    }
    return recordsByType;
}
/**
 * Produces a deep copy of the canonical field mappings so callers get their own mutable set.
 */
private Map<String, ObjectFieldMapping> cloneFieldMappings() {
    Map<String, ObjectFieldMapping> copies = new HashMap<String, ObjectFieldMapping>();
    for (Map.Entry<String, ObjectFieldMapping> mapping : canonicalObjectFieldMappings.entrySet()) {
        copies.put(mapping.getKey(), mapping.getValue().clone());
    }
    return copies;
}
/**
 * Looks up the field mapping for {@code type}.
 *
 * @throws IOException if no mapping exists (schema discovery was incomplete)
 */
ObjectFieldMapping getObjectFieldMapping(String type) throws IOException {
    ObjectFieldMapping mapping = objectFieldMappingHolder.get().get(type);
    if (mapping != null) {
        return mapping;
    }
    throw new IOException("WriteRecord for " + type + " not found. Make sure Schema Discovery is done correctly.");
}
/**
 * Looks up the write record for {@code type}.
 *
 * @throws IOException if no record exists (schema discovery was incomplete)
 */
HollowWriteRecord getWriteRecord(String type) throws IOException {
    HollowWriteRecord record = hollowWriteRecordsHolder.get().get(type);
    if (record != null) {
        return record;
    }
    throw new IOException("WriteRecord for " + type + " not found. Make sure Schema Discovery is done correctly.");
}
/**
 * Returns the cached passthrough write records, creating them on first use, with all
 * per-record state freshly reset so stale entries never leak between top-level records.
 */
private PassthroughWriteRecords getPassthroughWriteRecords() {
    PassthroughWriteRecords records = passthroughRecords.get();
    if (records == null) {
        records = new PassthroughWriteRecords();
        passthroughRecords.set(records);
    }
    records.passthroughRec.reset();
    records.singleValuePassthroughMapRec.reset();
    records.multiValuePassthroughMapRec.reset();
    return records;
}
// Bundle of reusable write records backing 'passthrough' (schema-less) field capture;
// instances are cached via passthroughRecords (see getPassthroughWriteRecords()).
private class PassthroughWriteRecords {
// Top-level PassthroughData record referencing the two maps below.
final HollowObjectWriteRecord passthroughRec;
final HollowObjectWriteRecord passthroughMapKeyWriteRecord;
final HollowObjectWriteRecord passthroughMapValueWriteRecord;
// Map of field name -> single String value.
final HollowMapWriteRecord singleValuePassthroughMapRec;
// Map of field name -> ListOfString, for array-valued passthrough fields.
final HollowMapWriteRecord multiValuePassthroughMapRec;
final HollowListWriteRecord multiValuePassthroughListRec;
public PassthroughWriteRecords() {
// Each object record is only constructed when its backing schema is present in
// hollowSchemas; otherwise the field is left null. NOTE(review): callers appear to
// assume these are non-null whenever passthrough types are configured — confirm.
this.passthroughRec = hollowSchemas.get("PassthroughData") != null ? new HollowObjectWriteRecord((HollowObjectSchema)hollowSchemas.get("PassthroughData")) : null;
this.passthroughMapKeyWriteRecord = hollowSchemas.get("MapKey") != null ? new HollowObjectWriteRecord((HollowObjectSchema)hollowSchemas.get("MapKey")) : null;
this.passthroughMapValueWriteRecord = hollowSchemas.get("String") != null ? new HollowObjectWriteRecord((HollowObjectSchema)hollowSchemas.get("String")) : null;
this.singleValuePassthroughMapRec = new HollowMapWriteRecord();
this.multiValuePassthroughMapRec = new HollowMapWriteRecord();
this.multiValuePassthroughListRec = new HollowListWriteRecord();
}
}
}
| 9,428 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/HollowDiscoveredField.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
/**
 * Mutable description of a single field discovered while scanning JSON records.
 * The field type may be relaxed as more records are seen (see HollowDiscoveredSchema).
 */
public class HollowDiscoveredField {

    FieldType fieldType;      // the field's type as discovered so far
    String referencedType;    // referenced schema name, or null for non-reference fields

    public HollowDiscoveredField(FieldType fieldType, String referencedType) {
        this.fieldType = fieldType;
        this.referencedType = referencedType;
    }

    @Override
    public String toString() {
        return "HollowDiscoveredField [fieldType=" + fieldType + ", referencedType=" + referencedType + "]";
    }
}
| 9,429 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/SchemaSolidifier.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * Converts discovered (inferred) schemas into concrete Hollow schemas, supplying the
 * auxiliary single-field String schemas (map keys and generic "String") that the
 * discovered schemas reference but do not declare themselves.
 */
public class SchemaSolidifier {

    public static Collection<HollowSchema> convertDiscoveredSchemas(Collection<HollowDiscoveredSchema> discoveredSchemas) {
        Map<String, HollowSchema> schemaMap = new HashMap<String, HollowSchema>();
        for (HollowDiscoveredSchema discovered : discoveredSchemas) {
            HollowSchema solidified = discovered.toHollowSchema();
            schemaMap.put(solidified.getName(), solidified);
            if (solidified instanceof HollowMapSchema) {
                // Map key types are always single-field String schemas; add one if missing.
                String keyType = ((HollowMapSchema) solidified).getKeyType();
                if (!schemaMap.containsKey(keyType)) {
                    schemaMap.put(keyType, getStringSchema(keyType));
                }
            } else if (referencesGenericStringSchema(solidified) && !schemaMap.containsKey("String")) {
                schemaMap.put("String", getStringSchema("String"));
            }
        }
        return schemaMap.values();
    }

    // True when any field of an object schema references the generic "String" type.
    private static boolean referencesGenericStringSchema(HollowSchema schema) {
        if (!(schema instanceof HollowObjectSchema)) {
            return false;
        }
        HollowObjectSchema objectSchema = (HollowObjectSchema) schema;
        for (int fieldIdx = 0; fieldIdx < objectSchema.numFields(); fieldIdx++) {
            if ("String".equals(objectSchema.getReferencedType(fieldIdx))) {
                return true;
            }
        }
        return false;
    }

    // Builds a single-field ("value": STRING) object schema with the given name.
    private static HollowObjectSchema getStringSchema(String schemaName) {
        HollowObjectSchema stringSchema = new HollowObjectSchema(schemaName, 1);
        stringSchema.addField("value", FieldType.STRING);
        return stringSchema;
    }
}
| 9,430 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/HollowSchemaNamer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Generates and caches schema names derived from type names, field names, and property paths.
 * Thread-safe. Generated names are interned and cached, so repeated lookups return the same
 * String instance.
 */
public class HollowSchemaNamer {

    private final ConcurrentHashMap<String, Map<String, String>> listNames = new ConcurrentHashMap<String, Map<String, String>>();
    private final ConcurrentHashMap<String, Map<String, String>> subObjectNames = new ConcurrentHashMap<String, Map<String, String>>();

    /** Name for the object schema backing {@code fieldName} nested under {@code typeName}. */
    public String subObjectName(String typeName, String prefix, String fieldName) {
        return subTypeName(subObjectNames, typeName, prefix, fieldName);
    }

    /** Name for the collection schema backing {@code fieldName} nested under {@code typeName}. */
    public String subCollectionName(String typeName, String prefix, String fieldName) {
        return subTypeName(listNames, typeName, prefix, fieldName);
    }

    /**
     * Resolves (and caches) the name {@code typeName + prefix + UppercasedFieldName},
     * using double-checked locking around the two-level cache.
     */
    public String subTypeName(ConcurrentHashMap<String, Map<String, String>> subNamesMap, String typeName, String prefix, String fieldName) {
        Map<String, String> typeNamesMap = subNamesMap.get(typeName);
        if (typeNamesMap == null) {
            synchronized (subNamesMap) {
                typeNamesMap = subNamesMap.get(typeName);
                if (typeNamesMap == null) {
                    typeNamesMap = new ConcurrentHashMap<String, String>();
                    Map<String, String> existingMap = subNamesMap.putIfAbsent(typeName, typeNamesMap);
                    if (existingMap != null)
                        typeNamesMap = existingMap;
                }
            }
        }
        String name = typeNamesMap.get(fieldName);
        if (name == null) {
            synchronized (typeNamesMap) {
                name = typeNamesMap.get(fieldName);
                if (name == null) {
                    name = typeName + prefix + uppercaseFirstCharacter(fieldName);
                    // Interned so identity comparisons on schema names elsewhere remain valid.
                    name = name.intern();
                    typeNamesMap.put(fieldName, name);
                }
            }
        }
        return name;
    }

    /** Converts a dotted property path ("foo.bar.baz") into a CamelCase schema name ("FooBarBaz"). */
    public String schemaNameFromPropertyPath(String propertyPath) {
        StringBuilder schemaName = new StringBuilder();
        while (propertyPath.indexOf('.') != -1) {
            String nextToken = propertyPath.substring(0, propertyPath.indexOf('.'));
            schemaName.append(uppercaseFirstCharacter(nextToken));
            propertyPath = propertyPath.substring(propertyPath.indexOf('.') + 1);
        }
        schemaName.append(uppercaseFirstCharacter(propertyPath));
        return schemaName.toString();
    }

    private String uppercaseFirstCharacter(String value) {
        // Fix: guard against empty strings too — substring(0, 1) on "" previously threw
        // StringIndexOutOfBoundsException (e.g. for paths with empty segments).
        if (value == null || value.isEmpty()) return "";
        return value.substring(0, 1).toUpperCase() + value.substring(1);
    }
}
| 9,431 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/HollowDiscoveredSchema.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A schema inferred incrementally while scanning JSON records. Fields accumulate across
 * records; a field seen with conflicting scalar types is relaxed to a compatible type.
 * Field mutation is synchronized; reads go through the backing ConcurrentHashMap.
 */
public class HollowDiscoveredSchema {

    final String schemaName;
    final DiscoveredSchemaType type;
    // Element type for LIST schemas / value type for MAP schemas; null for OBJECT.
    final String subType;
    final Map<String, HollowDiscoveredField> fields;

    public HollowDiscoveredSchema(String schemaName, DiscoveredSchemaType schemaType, String subType) {
        this.schemaName = schemaName;
        this.type = schemaType;
        this.subType = subType;
        // Only OBJECT schemas carry fields; LIST/MAP schemas share an immutable empty map.
        this.fields = schemaType == DiscoveredSchemaType.OBJECT ? new ConcurrentHashMap<String, HollowDiscoveredField>() : Collections.<String, HollowDiscoveredField> emptyMap();
    }

    public String getName() {
        return schemaName;
    }

    public Map<String, HollowDiscoveredField> getFields() {
        return fields;
    }

    /** Unconditionally (re)defines {@code fieldName} with the given type and no referenced type. */
    public synchronized void addOrReplaceField(String fieldName, FieldType fieldType) {
        fields.put(fieldName, new HollowDiscoveredField(fieldType, null));
    }

    public void addField(String fieldName, FieldType fieldType) {
        addField(fieldName, fieldType, null);
    }

    /**
     * Records a field observation, relaxing the stored type when the same field was previously
     * seen with a different scalar type.
     *
     * @throws RuntimeException if the field was already seen referencing a different type
     */
    public synchronized void addField(String fieldName, FieldType fieldType, String referencedType) {
        HollowDiscoveredField field = fields.get(fieldName);
        if (field == null) {
            fields.put(fieldName, new HollowDiscoveredField(fieldType, referencedType));
            return;
        }
        if (field.fieldType != fieldType) {
            field.fieldType = mostRelaxed(field.fieldType, fieldType);
        } else if (field.referencedType == null ? referencedType != null
                : !field.referencedType.equals(referencedType)) {
            // Fix: previously compared with reference inequality (!=), which only worked
            // because names produced by HollowSchemaNamer are interned; two equal but
            // distinct String instances would have been rejected incorrectly.
            throw new RuntimeException("Cannot reference more than one type of object for a given field");
        }
    }

    // Widest type able to represent both inputs. NOTE(review): only STRING and DOUBLE
    // relaxations are supported; other combinations (e.g. INT vs BOOLEAN) throw.
    private static FieldType mostRelaxed(FieldType ft1, FieldType ft2) {
        if (ft1 == FieldType.STRING || ft2 == FieldType.STRING)
            return FieldType.STRING;
        if (ft1 == FieldType.DOUBLE || ft2 == FieldType.DOUBLE)
            return FieldType.DOUBLE;
        throw new RuntimeException("There is no compatible field type between " + ft1 + " and " + ft2);
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("HollowDiscoveredSchema [schemaName=");
        builder.append(schemaName);
        builder.append(", type=");
        builder.append(type);
        builder.append(", subType=");
        builder.append(subType);
        builder.append(", fields=");
        builder.append(fields);
        builder.append("]");
        return builder.toString();
    }

    /**
     * Materializes this discovered schema as a concrete Hollow schema. Discovered STRING
     * fields become REFERENCE fields pointing at the shared "String" schema.
     */
    public HollowSchema toHollowSchema() {
        switch (type) {
            case LIST:
                return new HollowListSchema(schemaName, subType);
            case MAP:
                return new HollowMapSchema(schemaName, "MapKey", subType);
            case OBJECT:
                HollowObjectSchema schema = new HollowObjectSchema(schemaName, fields.size());
                for (Map.Entry<String, HollowDiscoveredField> entry : fields.entrySet()) {
                    if (entry.getValue().fieldType == FieldType.STRING) {
                        schema.addField(entry.getKey(), FieldType.REFERENCE, "String");
                    } else {
                        schema.addField(entry.getKey(), entry.getValue().fieldType, entry.getValue().referencedType);
                    }
                }
                return schema;
        }
        throw new IllegalStateException("HollowDiscoveredSchema type must be one of LIST,MAP,OBJECT. Was " + type);
    }
}
| 9,432 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/DiscoveredSchemaType.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
/**
 * Structural category of a schema inferred during JSON scanning: a plain OBJECT,
 * a LIST of elements, or a MAP of entries.
 */
public enum DiscoveredSchemaType {
OBJECT,
LIST,
MAP
}
| 9,433 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/discover/HollowJsonAdapterSchemaDiscoverer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.discover;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.jsonadapter.AbstractHollowJsonAdaptorTask;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Scans JSON records to infer Hollow schemas for a given type. Discovered schemas accumulate
 * across records; conflicting scalar field types are relaxed by HollowDiscoveredSchema.
 */
public class HollowJsonAdapterSchemaDiscoverer extends AbstractHollowJsonAdaptorTask {
// Schema names (derived from property paths) that should be modeled as MAPs rather than OBJECTs.
private final Set<String> mapTypes;
// All schemas discovered so far, keyed by schema name.
private final Map<String, HollowDiscoveredSchema> discoveredSchemas;
private final HollowSchemaNamer schemaNamer;
public HollowJsonAdapterSchemaDiscoverer(String typeName) {
super(typeName, "scan");
this.mapTypes = new HashSet<String>();
this.discoveredSchemas = new ConcurrentHashMap<String, HollowDiscoveredSchema>();
this.schemaNamer = new HollowSchemaNamer();
}
/**
 * Registers property paths whose corresponding schemas should be modeled as MAPs.
 * Each path is normalized to its schema name before registration.
 */
public void addMapTypes(String... types) {
    for (String propertyPath : types) {
        mapTypes.add(schemaNamer.schemaNameFromPropertyPath(propertyPath));
    }
}
/**
 * Registers property paths whose corresponding schemas should be modeled as MAPs.
 * Each path is normalized to its schema name before registration.
 */
public void addMapTypes(Set<String> types) {
    for (String propertyPath : types) {
        mapTypes.add(schemaNamer.schemaNameFromPropertyPath(propertyPath));
    }
}
/**
 * Processes one top-level JSON record: ensures an OBJECT schema exists for this adapter's
 * type and folds the record's fields into it. Always returns -1 — schema discovery
 * produces no record ordinal.
 */
@Override
protected int processRecord(JsonParser parser) throws IOException {
if (isDebug) System.out.println("\nProcessRecord: " + typeName);
final HollowDiscoveredSchema schema = discoveredSchema(typeName, DiscoveredSchemaType.OBJECT, null);
parser.nextToken();
discoverSchemas(parser, schema);
return -1;
}
/**
 * Scans up to {@code maxSample} records from the JSON file and returns all schemas
 * discovered so far (including any from previous scans on this instance).
 */
public Collection<HollowDiscoveredSchema> discoverSchemas(File jsonFile, Integer maxSample) throws Exception {
processFile(jsonFile, maxSample);
return discoveredSchemas.values();
}
/**
 * Scans up to {@code maxSample} records from the JSON reader and returns all schemas
 * discovered so far (including any from previous scans on this instance).
 */
public Collection<HollowDiscoveredSchema> discoverSchemas(Reader jsonReader, Integer maxSample) throws Exception {
processFile(jsonReader, maxSample);
return discoveredSchemas.values();
}
/**
 * Walks every field of the JSON object the parser has just entered, folding each
 * field into the given discovered schema.
 */
private void discoverSchemas(JsonParser parser, HollowDiscoveredSchema schema) throws IOException {
    for (JsonToken token = parser.nextToken(); token != JsonToken.END_OBJECT; token = parser.nextToken()) {
        discoverSchemaField(parser, token, parser.getCurrentName(), schema);
    }
}
/**
 * Infers the schema contribution of a single field value. Arrays create a LIST schema plus an
 * element schema; objects create either a MAP schema (when the derived sub-object name was
 * registered via addMapTypes) or a nested OBJECT schema; scalars add typed fields directly.
 * FIELD_NAME tokens are ignored — only value tokens carry schema information.
 */
private void discoverSchemaField(JsonParser parser, JsonToken token, String fieldName, HollowDiscoveredSchema schema) throws IOException {
if(token != JsonToken.FIELD_NAME) {
switch(token) {
case START_ARRAY:
// Array field: register an ArrayOf<...> LIST schema and a schema for its elements.
String listName = schemaNamer.subCollectionName(schema.schemaName, "ArrayOf", fieldName);
String elementName = schemaNamer.subObjectName(schema.schemaName, "", fieldName);
if (isDebug) System.out.println(String.format("\t ARR[START] token=%s schemaName=%s fieldName=%s listName=%s elementName=%s", token, schema.schemaName, fieldName, listName, elementName));
discoveredSchema(listName, DiscoveredSchemaType.LIST, elementName);
schema.addField(fieldName, FieldType.REFERENCE, listName);
HollowDiscoveredSchema elementSchema = discoveredSchema(elementName, DiscoveredSchemaType.OBJECT, null);
discoverSubArraySchemas(parser, elementSchema);
if (isDebug) System.out.println(String.format("\t ARR[END] token=%s schemaName=%s fieldName=%s listName=%s elementName=%s elementSchema=%s", token, schema.schemaName, fieldName, listName, elementName, elementSchema));
break;
case START_OBJECT:
String subObjectName = schemaNamer.subObjectName(schema.schemaName, "", fieldName);
if(mapTypes.contains(subObjectName)) {
// Registered map type: model as MapOf<...> with a value schema.
String subMapName = schemaNamer.subCollectionName(schema.schemaName, "MapOf", fieldName);
if (isDebug) System.out.println(String.format("\t MAP[START] token=%s schemaName=%s fieldName=%s subMapName=%s subObjectName=%s", token, schema.schemaName, fieldName, subMapName, subObjectName));
discoveredSchema(subMapName, DiscoveredSchemaType.MAP, subObjectName);
schema.addField(fieldName, FieldType.REFERENCE, subMapName);
HollowDiscoveredSchema valueSchema = discoveredSchema(subObjectName, DiscoveredSchemaType.OBJECT, null);
discoverSubMapSchemas(parser, valueSchema);
if (isDebug) System.out.println(String.format("\t MAP[END] token=%s schemaName=%s fieldName=%s subMapName=%s subObjectName=%s valueSchema=%s", token, schema.schemaName, fieldName, subMapName, subObjectName, valueSchema));
} else {
// Plain nested object: recurse into a sub-object schema.
if (isDebug) System.out.println(String.format("\t OBJ[START] token=%s schemaName=%s fieldName=%s subObjectName=%s", token, schema.schemaName, fieldName, subObjectName));
HollowDiscoveredSchema subObjectSchema = discoveredSchema(subObjectName, DiscoveredSchemaType.OBJECT, null);
if (fieldName != null) schema.addField(fieldName, FieldType.REFERENCE, subObjectName);
discoverSchemas(parser, subObjectSchema);
if (isDebug) System.out.println(String.format("\t OBJ[END] token=%s schemaName=%s fieldName=%s subObjectName=%s subObjectSchema=%s", token, schema.schemaName, fieldName, subObjectName, subObjectSchema));
}
break;
case VALUE_NUMBER_INT:
// JSON integers are widened to LONG fields.
if (isDebug) System.out.println(String.format("\t FIELD token=%s schemaName=%s fieldName=%s value=%s", token, schema.schemaName, fieldName, parser.getLongValue()));
schema.addField(fieldName, FieldType.LONG);
break;
case VALUE_NUMBER_FLOAT:
// JSON floating-point values are widened to DOUBLE fields.
if (isDebug) System.out.println(String.format("\t FIELD token=%s schemaName=%s fieldName=%s value=%s", token, schema.schemaName, fieldName, parser.getDoubleValue()));
schema.addField(fieldName, FieldType.DOUBLE);
break;
case VALUE_NULL:
// Nulls carry no type information; the field is not recorded.
if (isDebug) System.out.println(String.format("\t FIELD token=%s schemaName=%s fieldName=%s", token, schema.schemaName, fieldName));
break;
case VALUE_STRING:
if (isDebug) System.out.println(String.format("\t FIELD token=%s schemaName=%s fieldName=%s value=%s", token, schema.schemaName, fieldName, parser.getValueAsString()));
schema.addField(fieldName, FieldType.STRING);
break;
case VALUE_FALSE:
case VALUE_TRUE:
if (isDebug) System.out.println(String.format("\t FIELD token=%s schemaName=%s fieldName=%s value=%s", token, schema.schemaName, fieldName, parser.getBooleanValue()));
schema.addField(fieldName, FieldType.BOOLEAN);
break;
default:
}
}
}
/**
 * Folds every element of the JSON array the parser has just entered into the element's
 * object schema. Object elements recurse as whole objects; scalar elements contribute a
 * synthetic "value" field.
 */
private void discoverSubArraySchemas(JsonParser parser, HollowDiscoveredSchema objectSchema) throws IOException {
    for (JsonToken token = parser.nextToken(); token != JsonToken.END_ARRAY; token = parser.nextToken()) {
        if (token == JsonToken.START_OBJECT) {
            discoverSchemas(parser, objectSchema);
        } else {
            discoverSchemaField(parser, token, "value", objectSchema);
        }
    }
}
/**
 * Discovers the schema of a JSON map's values.
 * Map keys (FIELD_NAME tokens) carry no schema information and are skipped;
 * object values recurse into full schema discovery, scalar values are recorded
 * under the synthetic field "value".
 *
 * @param parser       positioned just after the map's START_OBJECT token
 * @param objectSchema the schema that accumulates the discovered value fields
 * @throws IOException if the underlying JSON stream cannot be read
 */
private void discoverSubMapSchemas(JsonParser parser, HollowDiscoveredSchema objectSchema) throws IOException {
    JsonToken t = parser.nextToken();
    if (isDebug) System.out.println("discoverSubMapSchemas[START]: token=" + t + ", fieldname=" + parser.getCurrentName());
    for (; t != JsonToken.END_OBJECT; t = parser.nextToken()) {
        if (isDebug) System.out.println("discoverSubMapSchemas[LOOP]: token=" + t + ", fieldname=" + parser.getCurrentName());
        if (t == JsonToken.FIELD_NAME) {
            continue; // a map key; only its value contributes to the schema
        }
        if (t == JsonToken.START_OBJECT) {
            if (isDebug) System.out.println("discoverSubMapSchemas[LOOP] discoverSchemas: token=" + t + ", fieldname=" + parser.getCurrentName());
            discoverSchemas(parser, objectSchema);
        } else {
            if (isDebug) System.out.println("discoverSubMapSchemas[LOOP] discoverSchemaField: token=" + t + ", fieldname=" + parser.getCurrentName());
            discoverSchemaField(parser, t, "value", objectSchema);
        }
    }
    if (isDebug) System.out.println("discoverSubMapSchemas[END]: token=" + t);
}
/**
 * Looks up (or lazily creates and registers) the shared {@code HollowDiscoveredSchema}
 * for {@code schemaName}, then verifies the cached entry has the requested type.
 *
 * @param schemaName  unique name of the schema
 * @param type        the kind of schema expected (e.g. OBJECT vs. list)
 * @param listSubType element type name for list schemas; only used on first creation
 * @return the canonical schema instance for this name
 * @throws RuntimeException if a schema with this name already exists but with a different type
 */
private HollowDiscoveredSchema discoveredSchema(String schemaName, DiscoveredSchemaType type, String listSubType) {
    // Fast path: unsynchronized read before taking the lock.
    // NOTE(review): this check-then-act is only a safe double-checked pattern if
    // discoveredSchemas is a concurrent map; its declaration is outside this view -- TODO confirm.
    HollowDiscoveredSchema schema = discoveredSchemas.get(schemaName);
    if(schema == null) {
        synchronized (discoveredSchemas) {
            // Re-check under the lock so concurrent callers agree on a single instance.
            schema = discoveredSchemas.get(schemaName);
            if (schema == null) {
                schema = new HollowDiscoveredSchema(schemaName, type, listSubType);
                discoveredSchemas.put(schemaName, schema);
            }
        }
    }
    // A schema name may not be reused across schema kinds.
    if(schema.type != type)
        throw new RuntimeException(schemaName + ": Expected schema of type " + type + " but was " + schema.type);
    return schema;
}
/**
 * Right-pads {@code s} with spaces to a minimum width of {@code n}.
 * Strings already at least {@code n} chars long are returned unchanged.
 * Unlike the previous {@code String.format("%1$-ns", s)} implementation, widths
 * {@code <= 0} are tolerated (the old form threw MissingFormatWidthException for
 * n == 0) and simply return {@code s}.
 *
 * @param s the string to pad (must be non-null)
 * @param n the minimum desired width
 * @return {@code s}, space-padded on the right to at least {@code n} chars
 */
private static String padRight(String s, int n) {
    StringBuilder sb = new StringBuilder(Math.max(n, s.length())).append(s);
    while (sb.length() < n) {
        sb.append(' ');
    }
    return sb.toString();
}
/**
 * Prints a human-readable report of the given schemas to {@code System.out}:
 * one header line per schema followed by an aligned listing of its fields.
 * OBJECT schemas whose field count exceeds {@code largeNumOfFieldsThreshold}
 * are flagged with a "[***]" marker in the header.
 *
 * @param largeNumOfFieldsThreshold field count above which an OBJECT schema is flagged
 */
public static void analyzeSchemas(Collection<HollowDiscoveredSchema> schemas, int largeNumOfFieldsThreshold) {
    for (HollowDiscoveredSchema schema : schemas) {
        System.out.print("\t");
        System.out.println(schema);

        int fieldCount = schema.fields.size();
        if (fieldCount == 0) continue;

        boolean manyFields = (schema.type == DiscoveredSchemaType.OBJECT) && (fieldCount > largeNumOfFieldsThreshold);
        String headerTemplate = manyFields ? "[***] Object with lots of fields: %s" : "Field Count: %s";

        StringBuilder report = new StringBuilder("\t - ");
        report.append(String.format(headerTemplate, fieldCount)).append("\n");

        // Compute the widest field name so the "->" column lines up.
        int maxKeyLen = 0;
        for (String key : schema.fields.keySet()) {
            maxKeyLen = Math.max(maxKeyLen, key.length());
        }

        int fieldIndex = 0;
        for (Map.Entry<String, HollowDiscoveredField> entry : schema.fields.entrySet()) {
            report.append("\t\t").append(++fieldIndex).append(": fieldname=").append(padRight(entry.getKey(), maxKeyLen)).append("\t -> ").append(entry.getValue()).append("\n");
        }
        System.out.print(report);
    }
}
}
| 9,434 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/util/JsonUtil.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.util;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.JsonNode;
import java.io.PrintStream;
/**
 * Debugging helper that walks a Jackson JSON token stream and pretty-prints every
 * token, one line per token, indented by nesting depth.
 */
public class JsonUtil {

    /** Prints the token stream of {@code node} to {@code System.out}. */
    public static void print(JsonNode node) throws Exception {
        print(node.traverse());
    }

    /** Prints the parser's remaining token stream to {@code System.out}. */
    public static void print(JsonParser parser) throws Exception {
        JsonToken token = parser.nextToken();
        print(parser, token, 0, System.out);
    }

    /** Prints the parser's remaining token stream to the supplied stream. */
    public static void print(JsonParser parser, PrintStream out) throws Exception {
        JsonToken token = parser.nextToken();
        print(parser, token, 0, out);
    }

    /** Writes {@code value} to {@code out}, preceded by {@code index} tab characters. */
    private static void print(int index, String value, PrintStream out) {
        for (int i = 0; i < index; i++) {
            out.print("\t");
        }
        out.println(value);
    }

    /**
     * Recursively prints tokens until the current object/level ends.
     *
     * @param parser the token source
     * @param token  the first token of this level (may be null at end of input)
     * @param index  current nesting depth, used as the indent width
     * @param out    destination stream for all output
     */
    private static void print(JsonParser parser, JsonToken token, int index, PrintStream out) throws Exception {
        // BUGFIX: the top-level banner previously went to System.out unconditionally,
        // ignoring the PrintStream supplied via print(JsonParser, PrintStream).
        if (index == 0) out.println("\n\n -----");
        try {
            while (token != null && token != JsonToken.END_OBJECT) {
                switch (token) {
                    case START_ARRAY:
                    case START_OBJECT:
                        // Print the container's field name, then recurse one level deeper.
                        print(index, String.format("fieldname=%s, token=%s", parser.getCurrentName(), token), out);
                        print(parser, parser.nextToken(), index + 1, out);
                        break;
                    case VALUE_NUMBER_INT:
                        print(index, String.format("fieldname=%s, token=%s, value=%s", parser.getCurrentName(), token, parser.getLongValue()), out);
                        break;
                    case VALUE_NUMBER_FLOAT:
                        print(index, String.format("fieldname=%s, token=%s, value=%s", parser.getCurrentName(), token, parser.getDoubleValue()), out);
                        break;
                    case VALUE_NULL:
                        print(index, String.format("fieldname=%s, token=%s, value=NULL", parser.getCurrentName(), token), out);
                        break;
                    case VALUE_STRING:
                        print(index, String.format("fieldname=%s, token=%s, value=%s", parser.getCurrentName(), token, parser.getValueAsString()), out);
                        break;
                    case VALUE_FALSE:
                    case VALUE_TRUE:
                        print(index, String.format("fieldname=%s, token=%s, value=%s", parser.getCurrentName(), token, parser.getBooleanValue()), out);
                        break;
                    case FIELD_NAME:
                        // Field names are echoed alongside their values; nothing to do here.
                        break;
                    case END_ARRAY:
                    case END_OBJECT:
                        index--;
                        break;
                    default:
                }
                token = parser.nextToken();
            }
        } catch (Exception ex) {
            // Deliberate double-reporting for a debug utility: dump the trace at the
            // point of failure, then let the caller see the same exception.
            ex.printStackTrace();
            throw ex;
        }
    }
}
| 9,435 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/field/FieldProcessor.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.field;
import com.fasterxml.jackson.core.JsonParser;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import java.io.IOException;
/**
 * Processes a single JSON field of a given entity, writing its value into the
 * supplied Hollow write record.
 */
public interface FieldProcessor {

    /** @return the name of the entity (type) this processor applies to */
    String getEntityName();

    /** @return the name of the JSON field this processor handles */
    String getFieldName();

    /**
     * Consumes the current value from {@code parser} and records it on
     * {@code writeRec}, adding any referenced records to {@code writeEngine}
     * as needed.
     *
     * @throws IOException if the underlying JSON stream cannot be read
     */
    void processField(JsonParser parser, HollowWriteStateEngine writeEngine, HollowObjectWriteRecord writeRec) throws IOException;
}
| 9,436 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/field
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/field/impl/AbstractFieldProcessor.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.field.impl;
import com.netflix.hollow.jsonadapter.field.FieldProcessor;
/**
 * Convenience base class for {@link FieldProcessor} implementations: stores the
 * (entityName, fieldName) pair the processor applies to and defines identity
 * (equals/hashCode) over exactly that pair.
 */
public abstract class AbstractFieldProcessor implements FieldProcessor {

    protected final String entityName;
    protected final String fieldName;

    public AbstractFieldProcessor(String entityName, String fieldName) {
        this.entityName = entityName;
        this.fieldName = fieldName;
    }

    @Override
    public String getEntityName() {
        return this.entityName;
    }

    @Override
    public String getFieldName() {
        return this.fieldName;
    }

    /**
     * Null-tolerant hash over (entityName, fieldName). Objects.hash produces the
     * same 31-based accumulation the previous hand-rolled implementation computed,
     * so hash values are unchanged.
     */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(entityName, fieldName);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        AbstractFieldProcessor other = (AbstractFieldProcessor) obj;
        return java.util.Objects.equals(entityName, other.entityName)
                && java.util.Objects.equals(fieldName, other.fieldName);
    }

    @Override
    public String toString() {
        // Same rendering as before, without the StringBuilder ceremony.
        return "AbstractFieldProcessor [entityName=" + entityName + ", fieldName=" + fieldName + "]";
    }
}
| 9,437 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/chunker/JsonArrayChunker.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.chunker;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
/**
 * Splits a character stream containing a JSON array of objects into per-object
 * chunks, each exposed as its own {@link java.io.Reader}. Input is read in
 * fixed-size segments; each segment's JSON-structural characters are indexed
 * asynchronously on the supplied executor so the chunking loop only visits
 * braces, quotes, and backslashes.
 */
public class JsonArrayChunker {

    /** Size (in chars) of each buffered input segment. */
    private static final int DEFAULT_SEGMENT_LENGTH = 262144;
    /** Maximum number of segments buffered (and indexed) ahead of the consumer. */
    private static final int SEGMENT_QUEUE_SIZE = 32;

    private final Reader reader;
    // Segments in stream order; each future completes once the segment's
    // special-character index has been built on the executor.
    private final Queue<CompletableFuture<JsonArrayChunkerInputSegment>> bufferSegments;
    private final Executor executor;
    private final int segmentLength;

    private JsonArrayChunkerInputSegment currentSegment;
    // Stream offset used for escape adjacency checks. NOTE(review): it is advanced by
    // segmentLength even for the very first segment (so the first segment "starts" at
    // segmentLength, not 0); only differences between offsets are ever compared, so
    // the constant shift is harmless -- confirm if absolute offsets are ever exposed.
    private long currentSegmentStartOffset;
    private boolean eofReached;

    public JsonArrayChunker(Reader reader, Executor executor) {
        this(reader, executor, DEFAULT_SEGMENT_LENGTH);
    }

    JsonArrayChunker(Reader reader, Executor executor, int segmentLength) {
        this.reader = reader;
        this.bufferSegments = new ArrayDeque<>();
        this.executor = executor;
        this.segmentLength = segmentLength;
    }

    /**
     * Initialize the chunker.
     * Internally, this buffers an initial set of segments. We buffer until we have reached the end
     * of the reader or filled up our SEGMENT_QUEUE_SIZE buffer. Adding a segment kicks off a
     * {@link JsonArrayChunkerInputSegment#findSpecialCharacterOffsets task} that indexes the
     * locations of all special characters in the segment.
     */
    public void initialize() throws IOException {
        while (!eofReached && bufferSegments.size() < SEGMENT_QUEUE_SIZE) {
            fillOneSegment();
        }
        nextSegment();
    }

    /**
     * Returns a Reader over the next top-level JSON object in the array, or null
     * when the input is exhausted. Brace depth is tracked while ignoring braces
     * inside string literals; quotes preceded by an effective backslash do not
     * toggle the in-string state.
     *
     * @throws IllegalStateException if the input is not well-formed ("Bad json")
     */
    @SuppressWarnings("resource")
    public Reader nextChunk() throws IOException {
        // Advance to the next structural character; running out of segments here
        // means there are no further objects.
        while(!currentSegment.nextSpecialCharacter()) {
            if(!nextSegment())
                return null;
        }
        // Every chunk must begin at an object-open brace.
        if(currentSegment.specialCharacter() != '{')
            throw new IllegalStateException("Bad json");
        int nestedObjectCount = 1;
        JsonArrayChunkReader chunkReader = new JsonArrayChunkReader(currentSegment, currentSegment.specialCharacterIteratorPosition());
        boolean insideQuotes = false;
        // Absolute stream position of the most recent *effective* (non-escaped)
        // backslash; Long.MIN_VALUE means "none yet".
        long lastEscapeCharacterLocation = Long.MIN_VALUE;
        while(nestedObjectCount > 0) {
            while(!currentSegment.nextSpecialCharacter()) {
                // The object spans segments: EOF mid-object is malformed input.
                if(!nextSegment())
                    throw new IllegalStateException("Bad json");
                chunkReader.addSegment(currentSegment);
            }
            switch(currentSegment.specialCharacter()) {
            case '{':
                if(!insideQuotes)
                    nestedObjectCount++;
                break;
            case '}':
                if(!insideQuotes)
                    nestedObjectCount--;
                break;
            case '\"':
                long currentLocation = currentSegmentStartOffset + currentSegment.specialCharacterIteratorPosition();
                // A quote directly after an effective backslash is escaped (\")
                // and does not toggle the in-string state.
                if(lastEscapeCharacterLocation != (currentLocation - 1)) {
                    insideQuotes = !insideQuotes;
                }
                break;
            case '\\':
                currentLocation = currentSegmentStartOffset + currentSegment.specialCharacterIteratorPosition();
                // A backslash directly after an effective backslash is itself the
                // escaped char of a "\\" pair, so it must NOT become an escape for
                // the character that follows it.
                if(lastEscapeCharacterLocation != (currentLocation - 1))
                    lastEscapeCharacterLocation = currentLocation;
                break;
            }
        }
        // End the chunk just past the closing brace of the outermost object.
        chunkReader.setEndOffset(currentSegment.specialCharacterIteratorPosition() + 1);
        return chunkReader;
    }

    /**
     * Advances currentSegment to the next buffered segment, topping the buffer
     * back up by one read if the stream is not yet exhausted.
     *
     * @return false when no further segments exist
     */
    private boolean nextSegment() throws IOException {
        if (bufferSegments.isEmpty()) {
            return false;
        }
        if (!eofReached) {
            fillOneSegment();
        }
        currentSegmentStartOffset += segmentLength;
        try {
            // Blocks until the segment's async indexing task has completed.
            currentSegment = bufferSegments.remove().join();
        } catch (CompletionException e) {
            Throwable t = e.getCause(); // unwrap
            if (t instanceof IOException) {
                throw (IOException) t;
            } else {
                throw t instanceof RuntimeException ? (RuntimeException) t : e;
            }
        }
        return true;
    }

    // Reads one segment's worth of chars from the underlying reader and schedules
    // its special-character indexing on the executor. fill() returns true when the
    // reader produced fewer chars than the segment holds, i.e. end of stream.
    private void fillOneSegment() throws IOException {
        JsonArrayChunkerInputSegment seg = new JsonArrayChunkerInputSegment(segmentLength);
        eofReached = seg.fill(reader);
        bufferSegments.add(CompletableFuture.supplyAsync(seg::findSpecialCharacterOffsets, executor));
    }
}
| 9,438 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/chunker/JsonArrayChunkReader.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.chunker;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
/**
 * A {@link Reader} over one JSON-object chunk that may span several input
 * segments. The first segment is read starting at the supplied offset; the last
 * segment is read up to the end offset set via {@link #setEndOffset(int)}; any
 * segments in between are read in full.
 */
public class JsonArrayChunkReader extends Reader {

    private final List<JsonArrayChunkerInputSegment> segments;
    // Exclusive end position within the final segment (set once the chunk's
    // closing brace is located).
    private int lastSegmentEndPos;
    private int currentSegment;
    private int currentSegmentOffset;

    public JsonArrayChunkReader(JsonArrayChunkerInputSegment firstSegment, int startOffset) {
        this.segments = new ArrayList<JsonArrayChunkerInputSegment>(2);
        segments.add(firstSegment);
        this.currentSegment = 0;
        this.currentSegmentOffset = startOffset;
    }

    /** Appends a segment the chunk continues into. */
    public void addSegment(JsonArrayChunkerInputSegment segment) {
        segments.add(segment);
    }

    /** Sets the exclusive end position within the last added segment. */
    public void setEndOffset(int endOffset) {
        this.lastSegmentEndPos = endOffset;
    }

    /**
     * Reads a single character, advancing across segment boundaries as needed.
     *
     * @return the character, or -1 at end of chunk
     */
    @Override
    public int read() {
        while (currentSegment < segments.size()) {
            // The final segment is bounded by the chunk end; earlier ones by their fill length.
            int maxSrcPos = currentSegment == (segments.size() - 1) ? lastSegmentEndPos : segments.get(currentSegment).length();
            if (currentSegmentOffset < maxSrcPos) {
                return segments.get(currentSegment).charAt(currentSegmentOffset++);
            }
            currentSegment++;
            currentSegmentOffset = 0;
        }
        return -1;
    }

    @Override
    public int read(char[] cbuf, int off, int len) throws IOException {
        if (currentSegment == segments.size())
            return -1;
        int totalCopiedBytes = 0;
        while (currentSegment < segments.size()) {
            int maxSrcPos = currentSegment == (segments.size() - 1) ? lastSegmentEndPos : segments.get(currentSegment).length();
            int copiedBytes = segments.get(currentSegment).copyTo(cbuf, currentSegmentOffset, off, len, maxSrcPos);
            len -= copiedBytes;
            totalCopiedBytes += copiedBytes;
            currentSegmentOffset += copiedBytes;
            if (len == 0)
                return totalCopiedBytes;
            off += copiedBytes;
            currentSegment++;
            currentSegmentOffset = 0;
        }
        // BUGFIX: previously this returned 0 when a prior read left the last segment
        // exactly exhausted without advancing currentSegment past it; the Reader
        // contract requires -1 at end of stream (a 0 return can spin callers).
        // A zero-length request still returns 0 via the in-loop return above.
        return totalCopiedBytes > 0 ? totalCopiedBytes : -1;
    }

    @Override
    public void close() { }
}
| 9,439 |
0 |
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter
|
Create_ds/hollow/hollow-jsonadapter/src/main/java/com/netflix/hollow/jsonadapter/chunker/JsonArrayChunkerInputSegment.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.jsonadapter.chunker;
import com.netflix.hollow.core.util.IntList;
import java.io.IOException;
import java.io.Reader;
import org.apache.commons.io.IOUtils;
/**
 * A fixed-size buffer of input characters paired with a pre-computed index of
 * the JSON-structural characters it contains.
 *
 * Each index entry packs the character's offset into the low 30 bits of an int
 * and a 2-bit character code into the top two bits (see the table below), so
 * segments must be shorter than 2^30 chars.
 *
 * Not thread-safe by itself; fill(), findSpecialCharacterOffsets() and the
 * iteration methods are expected to run sequentially, with any cross-thread
 * hand-off arranged by the caller (e.g. via a CompletableFuture).
 */
class JsonArrayChunkerInputSegment {
    // special characters: 0:{ 1:} 2:" 3:\
    private final char[] data;
    private final IntList specialCharacterOffsets = new IntList();
    // Number of chars actually read into data; may be < data.length on the final segment.
    private int dataLength;
    private int specialCharacterIteratorPos = -1;

    JsonArrayChunkerInputSegment(int len) {
        this.data = new char[len];
    }

    /**
     * Fills the buffer from the reader.
     *
     * @return true if end-of-stream was reached (fewer than data.length chars read)
     */
    boolean fill(Reader reader) throws IOException {
        dataLength = IOUtils.read(reader, data);
        return dataLength < data.length;
    }

    /**
     * Scans the buffered chars and records the offset (with the 2-bit character
     * code, see class doc) of every JSON-structural character.
     *
     * @return this, for convenient use as a CompletableFuture supplier
     */
    JsonArrayChunkerInputSegment findSpecialCharacterOffsets() {
        // Scan only the chars actually read (dataLength), not the whole backing
        // array: the tail of a partially-filled final segment is '\u0000' padding
        // that the previous data.length bound scanned needlessly (the padding never
        // matched a case, so results are unchanged).
        for (int i = 0; i < dataLength; i++) {
            switch (data[i]) {
                case '{':
                    specialCharacterOffsets.add(i);
                    break;
                case '}':
                    specialCharacterOffsets.add(i | (1 << 30));
                    break;
                case '\"':
                    specialCharacterOffsets.add(i | (2 << 30));
                    break;
                case '\\':
                    specialCharacterOffsets.add(i | (3 << 30));
                    break;
                default:
            }
        }
        return this;
    }

    /** Advances the special-character iterator; false when exhausted. */
    boolean nextSpecialCharacter() {
        return ++specialCharacterIteratorPos < specialCharacterOffsets.size();
    }

    /** @return the in-segment offset of the special character at the iterator position */
    int specialCharacterIteratorPosition() {
        return specialCharacterOffsets.get(specialCharacterIteratorPos) & 0x3FFFFFFF;
    }

    /** @return which special character is at the iterator position */
    char specialCharacter() {
        // Unsigned shift recovers the 2-bit code even when bit 31 is set (code 3).
        switch(specialCharacterOffsets.get(specialCharacterIteratorPos) >>> 30) {
        case 0: return '{';
        case 1: return '}';
        case 2: return '\"';
        case 3: return '\\';
        }
        throw new IllegalStateException();
    }

    /** @return the number of valid chars in this segment */
    int length() {
        return dataLength;
    }

    char charAt(int offset) {
        return data[offset];
    }

    /**
     * Copies up to {@code len} chars, bounded by {@code maxSrcPos}, from this
     * segment into {@code dest}.
     *
     * @return the number of chars actually copied (may be less than {@code len})
     */
    int copyTo(char[] dest, int srcPos, int destPos, int len, int maxSrcPos) {
        int bytesAvailable = maxSrcPos - srcPos;
        if(bytesAvailable >= len) {
            System.arraycopy(data, srcPos, dest, destPos, len);
            return len;
        } else {
            System.arraycopy(data, srcPos, dest, destPos, bytesAvailable);
            return bytesAvailable;
        }
    }
}
| 9,440 |
0 |
Create_ds/hollow/hollow-diff-ui/src/tools/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/tools/java/com/netflix/hollow/diff/ui/DiffUITest.java
|
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.diffview.FakeHollowDiffGenerator;
import com.netflix.hollow.tools.diff.HollowDiff;
import org.junit.Test;
/**
 * Manually-run harness that spins up the diff UI over a synthetic diff and
 * blocks until the server is shut down.
 */
public class DiffUITest {

    /** Serves a fake diff through the default UI server. */
    @Test
    public void test() throws Exception {
        HollowDiff fakeDiff = new FakeHollowDiffGenerator().createFakeDiff();
        HollowDiffUIServer uiServer = new HollowDiffUIServer();
        uiServer.addDiff("diff", fakeDiff);
        uiServer.start();
        uiServer.join();
    }

    /** Same scenario, driven through the legacy Jetty-based server entry point. */
    @Test
    public void testBackwardsCompatibiltyWithJettyImplementation() throws Exception {
        HollowDiff fakeDiff = new FakeHollowDiffGenerator().createFakeDiff();
        com.netflix.hollow.diff.ui.jetty.HollowDiffUIServer jettyServer =
                new com.netflix.hollow.diff.ui.jetty.HollowDiffUIServer();
        jettyServer.addDiff("diff", fakeDiff);
        jettyServer.start();
        jettyServer.join();
    }
}
| 9,441 |
0 |
Create_ds/hollow/hollow-diff-ui/src/tools/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/tools/java/com/netflix/hollow/diff/ui/HistoryUITest.java
|
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.engine.HollowBlobReader;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.write.HollowBlobWriter;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.history.ui.HollowHistoryUIServer;
import com.netflix.hollow.tools.history.HollowHistory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.BitSet;
import org.junit.Test;
/**
* A tool to create a simple delta chain with fake data and spin up history UIs
* that build history in (a) purely fwd direction, and (b) both fwd and reverse
* directions simultaneously. This is not run as a part of the test suite.
*
* Ordinal maps for data used in this test-
*
*
* V0
*
* 0: 3, 13
* 1: 4, 44
* 2: 15, 150
* 3: 16, 160
*
*
* V1
*
* 4: 1, 1
* 5: 2, 2
* 6: 3, 3
* 7: 4, 4
* 8: 5, 5
* 9: 6, 6
*
*
* V2
*
* 0: 2, 7
* 1: 5, 8
* 2: 7, 9
* 3: 8, 10
* 6: 3, 3
* 9: 6, 6
*
*
* V3
*
* 0: 2, 7
* 3: 8, 10
* 4: 1, 1
* 5: 3, 11
* 7: 6, 12
* 8: 7, 13
*
*
* V4
* 0: 2, 7
* 1: 1, 18
* 2: 3, 19
* 3: 8, 10
* 6: 15, 13
* 7: 6, 12
* 9: 18, 10
* 10: 28, 90
*
*/
public class HistoryUITest {
private static final String CUSTOM_VERSION_TAG = "myVersion";
private final int MAX_STATES = 10;
private HollowObjectSchema schema;
/**
 * Serves two history UIs side by side: one whose history was built purely in
 * the forward direction (:7777) and one built bidirectionally (:7778), then
 * blocks until both servers are shut down.
 */
@Test
public void startServerOnPorts7777And7778() throws Exception {
    // Forward-only history.
    HollowHistory fwdOnlyHistory = createHistoryD();
    HollowHistoryUIServer fwdOnlyServer = new HollowHistoryUIServer(fwdOnlyHistory, 7777);
    fwdOnlyServer.start();

    // Forward + reverse history.
    HollowHistory biHistory = createHistoryBidirectional();
    HollowHistoryUIServer biServer = new HollowHistoryUIServer(biHistory, 7778);
    biServer.start();

    // optionally, test dropping the oldest state
    // biHistory.removeHistoricalStates(1);

    fwdOnlyServer.join();
    biServer.join();
}
/**
 * Builds a {@link HollowHistory} anchored at the middle of the delta chain (v2)
 * and grows it in BOTH directions: forward deltas v2-&gt;v3-&gt;v4 and reverse
 * deltas v2-&gt;v1-&gt;v0. See the class javadoc for each version's ordinal map.
 *
 * @return the bidirectionally-populated history
 * @throws IOException if any blob cannot be written to / read from the in-memory streams
 */
private HollowHistory createHistoryBidirectional() throws IOException {
    HollowHistory history;
    HollowWriteStateEngine stateEngine;
    {
        // One type, "TypeA", with two INT fields.
        schema = new HollowObjectSchema("TypeA", 2);
        stateEngine = new HollowWriteStateEngine();
        schema.addField("a1", HollowObjectSchema.FieldType.INT);
        schema.addField("a2", HollowObjectSchema.FieldType.INT);
        stateEngine.addTypeState(new HollowObjectTypeWriteState(schema));
        // v0: initial snapshot.
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v0");
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 13 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 4, 44 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 15, 150 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 16, 160 });
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v0 = new ByteArrayOutputStream();
        HollowBlobWriter writer = new HollowBlobWriter(stateEngine);
        writer.writeSnapshot(baos_v0);
        stateEngine.prepareForNextCycle();
        // v1: snapshot plus deltas in both directions relative to v0.
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v1");
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 1 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 2 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 3 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 4, 4 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 5, 5 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 6 });
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v1 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v0_to_v1 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v1_to_v0 = new ByteArrayOutputStream();
        writer = new HollowBlobWriter(stateEngine);
        writer.writeSnapshot(baos_v1);
        writer.writeDelta(baos_v0_to_v1);
        writer.writeReverseDelta(baos_v1_to_v0);
        stateEngine.prepareForNextCycle();
        // v2: the anchor state the history is initialized from (below).
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v2");
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 3 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 5, 8 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 6 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 7, 9 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 });
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v2 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v1_to_v2 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v2_to_v1 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v2);
        writer.writeDelta(baos_v1_to_v2);
        writer.writeReverseDelta(baos_v2_to_v1);
        stateEngine.prepareForNextCycle();
        // v3: deltas only (no snapshot needed past the anchor).
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v3");
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 1 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 11 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 12 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 7, 13 });
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 });
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v2_to_v3 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v3_to_v2 = new ByteArrayOutputStream();
        writer.writeDelta(baos_v2_to_v3);
        writer.writeReverseDelta(baos_v3_to_v2);
        // v4
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v4");
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 18 }); // 0
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 }); // 1
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 19 }); // 2
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 12 }); // 3
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 15, 13 }); // 4
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 }); // 5
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 18, 10 }); // 6
        addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 28, 90 }); // 7
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v4 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v4_to_v3 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v3_to_v4 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v4);
        writer.writeDelta(baos_v3_to_v4);
        writer.writeReverseDelta(baos_v4_to_v3);
        // Build history bi-directionally: both read engines load the v2 snapshot;
        // the fwd engine then applies forward deltas, the rev engine reverse deltas.
        HollowReadStateEngine fwdReadStateEngine = new HollowReadStateEngine();
        HollowReadStateEngine revReadStateEngine = new HollowReadStateEngine();
        HollowBlobReader fwdReader = new HollowBlobReader(fwdReadStateEngine);
        HollowBlobReader revReader = new HollowBlobReader(revReadStateEngine);
        fwdReader.readSnapshot(HollowBlobInput.serial(baos_v2.toByteArray()));
        System.out.println("Ordinals populated in fwdReadStateEngine: ");
        exploreOrdinals(fwdReadStateEngine);
        revReader.readSnapshot(HollowBlobInput.serial(baos_v2.toByteArray()));
        System.out.println("Ordinals populated in revReadStateEngine (same as fwdReadStateEngine): ");
        exploreOrdinals(revReadStateEngine);
        // Anchor the history at version 2, then register the reverse engine.
        history = new HollowHistory(fwdReadStateEngine, 2L, MAX_STATES, true);
        history.getKeyIndex().addTypeIndex("TypeA", "a1");
        history.getKeyIndex().indexTypeField("TypeA", "a1");
        history.initializeReverseStateEngine(revReadStateEngine, 2L);
        // Interleave forward (v3, v4) and reverse (v1, v0) transitions.
        fwdReader.applyDelta(HollowBlobInput.serial(baos_v2_to_v3.toByteArray()));
        exploreOrdinals(fwdReadStateEngine);
        history.deltaOccurred(3L);
        revReader.applyDelta(HollowBlobInput.serial(baos_v2_to_v1.toByteArray()));
        exploreOrdinals(revReadStateEngine);
        history.reverseDeltaOccurred(1L);
        fwdReader.applyDelta(HollowBlobInput.serial(baos_v3_to_v4.toByteArray()));
        exploreOrdinals(fwdReadStateEngine);
        history.deltaOccurred(4L);
        revReader.applyDelta(HollowBlobInput.serial(baos_v1_to_v0.toByteArray()));
        exploreOrdinals(revReadStateEngine);
        history.reverseDeltaOccurred(0L);
    }
    return history;
}
/**
 * Debug dump: prints the engine's custom version tag, then for every type the
 * populated ordinal BitSet and each record's two INT fields (a1, a2).
 */
private void exploreOrdinals(HollowReadStateEngine readStateEngine) {
    System.out.println("CUSTOM_VERSION_TAG= " + readStateEngine.getHeaderTags().get(CUSTOM_VERSION_TAG));
    for (HollowTypeReadState typeState : readStateEngine.getTypeStates()) {
        BitSet populated = typeState.getPopulatedOrdinals();
        System.out.println("SNAP: PopulatedOrdinals= " + populated);
        for (int ordinal = populated.nextSetBit(0); ordinal != -1; ordinal = populated.nextSetBit(ordinal + 1)) {
            HollowObjectTypeReadState objectState = (HollowObjectTypeReadState) typeState;
            System.out.println(String.format("%s: %s, %s", ordinal, objectState.readInt(ordinal, 0), objectState.readInt(ordinal, 1)));
        }
    }
}
/**
 * Adds one record of the given schema to the write state engine, setting each
 * named INT field to the value at the matching index in {@code vals}.
 */
private static void addRec(HollowWriteStateEngine stateEngine, HollowObjectSchema schema, String[] names, int[] vals) {
    HollowObjectWriteRecord record = new HollowObjectWriteRecord(schema);
    int fieldIdx = 0;
    for (String name : names) {
        record.setInt(name, vals[fieldIdx++]);
    }
    stateEngine.add(schema.getName(), record);
}
/**
 * Builds a {@link HollowHistory} spanning five producer cycles (v0..v4) of a simple
 * two-int-field "TypeA" type. Each cycle writes a snapshot plus forward and reverse
 * delta blobs, but the history itself is assembled by loading the v0 snapshot and
 * applying only the forward deltas, keyed on field "a1".
 *
 * NOTE(review): the ordinal layout produced by the exact addRec ordering below is
 * relied upon by the assertions of the calling test — do not reorder the records.
 *
 * @return a history covering versions 0 through 4
 * @throws IOException if blob serialization or deserialization fails
 */
private HollowHistory createHistoryD() throws IOException {
HollowHistory history;
HollowReadStateEngine readStateEngine;
HollowBlobReader reader;
HollowWriteStateEngine stateEngine;
{
schema = new HollowObjectSchema("TypeA", 2);
stateEngine = new HollowWriteStateEngine();
schema.addField("a1", HollowObjectSchema.FieldType.INT);
schema.addField("a2", HollowObjectSchema.FieldType.INT);
//attach schema to write state engine
stateEngine.addTypeState(new HollowObjectTypeWriteState(schema));
// v0: chain origin — snapshot only, no deltas yet
stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v0");
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 13 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 4, 44 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 15, 150 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 16, 160 });
stateEngine.prepareForWrite();
ByteArrayOutputStream baos_v0 = new ByteArrayOutputStream();
HollowBlobWriter writer = new HollowBlobWriter(stateEngine);
writer.writeSnapshot(baos_v0);
stateEngine.prepareForNextCycle();
// v1: full record turnover relative to v0 (only keys 3 and 4 survive, with new a2 values)
stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v1");
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 1 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 2 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 3 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 4, 4 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 5, 5 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 6 });
stateEngine.prepareForWrite();
ByteArrayOutputStream baos_v0_to_v1 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v1_to_v0 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v1 = new ByteArrayOutputStream();
writer = new HollowBlobWriter(stateEngine);
writer.writeSnapshot(baos_v1);
writer.writeDelta(baos_v0_to_v1);
writer.writeReverseDelta(baos_v1_to_v0);
stateEngine.prepareForNextCycle();
// v2: removes keys 1 and 4, modifies 2 and 5, adds 7 and 8
stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v2");
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 3 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 5, 8 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 6 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 7, 9 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 });
stateEngine.prepareForWrite();
ByteArrayOutputStream baos_v2 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v1_to_v2 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v2_to_v1 = new ByteArrayOutputStream();
writer.writeSnapshot(baos_v2);
writer.writeDelta(baos_v1_to_v2);
writer.writeReverseDelta(baos_v2_to_v1);
stateEngine.prepareForNextCycle();
// v3: re-adds key 1, removes 5, modifies 3, 6 and 7
stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v3");
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 1 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 11 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 12 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 7, 13 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 });
stateEngine.prepareForWrite();
ByteArrayOutputStream baos_v3 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v2_to_v3 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v3_to_v2 = new ByteArrayOutputStream();
writer.writeSnapshot(baos_v3);
writer.writeDelta(baos_v2_to_v3);
writer.writeReverseDelta(baos_v3_to_v2);
stateEngine.prepareForNextCycle();
// v4: modifies 1 and 3, removes 7, adds 15, 18 and 28
stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v4");
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 1, 18 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 2, 7 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 3, 19 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 6, 12 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 15, 13 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 8, 10 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 18, 10 });
addRec(stateEngine, schema, new String[] { "a1", "a2" }, new int[] { 28, 90 });
stateEngine.prepareForWrite();
ByteArrayOutputStream baos_v4 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v3_to_v4 = new ByteArrayOutputStream();
ByteArrayOutputStream baos_v4_to_v3 = new ByteArrayOutputStream();
// note: unlike earlier cycles the deltas are written before the snapshot here
writer.writeDelta(baos_v3_to_v4);
writer.writeReverseDelta(baos_v4_to_v3);
writer.writeSnapshot(baos_v4);
// Build history: load the v0 snapshot, then walk the forward delta chain,
// recording each transition with an incrementing version number
readStateEngine = new HollowReadStateEngine();
reader = new HollowBlobReader(readStateEngine);
reader.readSnapshot(HollowBlobInput.serial(baos_v0.toByteArray()));
history = new HollowHistory(readStateEngine, 0L, MAX_STATES);
history.getKeyIndex().addTypeIndex("TypeA", "a1");
reader.applyDelta(HollowBlobInput.serial(baos_v0_to_v1.toByteArray()));
history.deltaOccurred(1L);
reader.applyDelta(HollowBlobInput.serial(baos_v1_to_v2.toByteArray()));
history.deltaOccurred(2L);
reader.applyDelta(HollowBlobInput.serial(baos_v2_to_v3.toByteArray()));
history.deltaOccurred(3L);
reader.applyDelta(HollowBlobInput.serial(baos_v3_to_v4.toByteArray()));
history.deltaOccurred(4L);
}
return history;
}
}
| 9,442 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/HollowEffigyCollectionPairerTest.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.HollowEffigy;
import com.netflix.hollow.diffview.effigy.HollowEffigy.Field;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyCollectionPairer;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyFieldPairer.EffigyFieldPair;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
public class HollowEffigyCollectionPairerTest {

    @Test
    public void test() {
        // "from" side of the pairing
        HollowEffigy fromList = list(
                element("1", 1, 1),
                element("2", 2, 2),
                element("3", 3, 3),
                element("4", 4, 4));

        // "to" side: shuffled order, some modified values, one element gone and two new
        HollowEffigy toList = list(
                element("3", 103, 103),
                element("1", 2, 1),
                element("2", 102, 2),
                element("5", 5, 5),
                element("1", 1, 1));

        List<EffigyFieldPair> pairs = new HollowEffigyCollectionPairer(fromList, toList, null).pair();

        Assert.assertEquals(6, pairs.size());
        assertPair(pairs.get(0), "1", "1");
        assertPair(pairs.get(1), "2", "2");
        assertPair(pairs.get(2), "3", "3");
        assertPair(pairs.get(3), "4", null);   // removed from the "to" side
        assertPair(pairs.get(4), null, "1");   // added on the "to" side
        assertPair(pairs.get(5), null, "5");   // added on the "to" side
    }

    /**
     * Asserts the first field of each side of the pair, or that the side is absent
     * when the expected value is null.
     */
    private void assertPair(EffigyFieldPair pair, String expectedFromField1, String expectedToField1) {
        if (expectedFromField1 == null) {
            Assert.assertNull(pair.getFrom());
        } else {
            HollowEffigy fromElement = (HollowEffigy) pair.getFrom().getValue();
            Assert.assertEquals(expectedFromField1, fromElement.getFields().get(0).getValue());
        }
        if (expectedToField1 == null) {
            Assert.assertNull(pair.getTo());
        } else {
            HollowEffigy toElement = (HollowEffigy) pair.getTo().getValue();
            Assert.assertEquals(expectedToField1, toElement.getFields().get(0).getValue());
        }
    }

    /** Wraps the given elements in a "list" effigy. */
    private HollowEffigy list(HollowEffigy... elements) {
        HollowEffigy listEffigy = new HollowEffigy("list");
        for (HollowEffigy element : elements) {
            listEffigy.add(new Field("element", element));
        }
        return listEffigy;
    }

    /** Builds an "element" effigy carrying a STRING, an INT and a FLOAT field. */
    private HollowEffigy element(String field1, int field2, float field3) {
        HollowEffigy elementEffigy = new HollowEffigy("element");
        elementEffigy.add(new Field("field1", "STRING", field1));
        elementEffigy.add(new Field("field2", "INT", field2));
        elementEffigy.add(new Field("field3", "FLOAT", field3));
        return elementEffigy;
    }
}
| 9,443 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/HollowHistoryUIServerTest.java
|
package com.netflix.hollow.diffview;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.history.ui.HollowHistoryUIServer;
import com.netflix.hollow.tools.history.HollowHistory;
import org.junit.Test;
public class HollowHistoryUIServerTest {

    @Test
    public void test() throws Exception {
        // An empty read state engine is enough to stand up and tear down the UI server.
        HollowHistory history = new HollowHistory(new HollowReadStateEngine(), Long.MAX_VALUE, 10);
        HollowHistoryUIServer uiServer = new HollowHistoryUIServer(history, 7890);
        uiServer.start();
        uiServer.stop();
    }

    @Test
    public void testBackwardsCompatibiltyWithJettyImplementation() throws Exception {
        // The legacy jetty-package entry point must keep working as well.
        HollowHistory history = new HollowHistory(new HollowReadStateEngine(), Long.MAX_VALUE, 10);
        com.netflix.hollow.history.ui.jetty.HollowHistoryUIServer uiServer =
                new com.netflix.hollow.history.ui.jetty.HollowHistoryUIServer(history, 7890);
        uiServer.start();
        uiServer.stop();
    }
}
| 9,444 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/HollowDiffUIServerTest.java
|
package com.netflix.hollow.diffview;
import com.netflix.hollow.diff.ui.HollowDiffUIServer;
import com.netflix.hollow.tools.diff.HollowDiff;
import org.junit.Test;
public class HollowDiffUIServerTest {

    @Test
    public void test() throws Exception {
        // Register a fake diff, then verify the server starts and stops cleanly.
        HollowDiff fakeDiff = new FakeHollowDiffGenerator().createFakeDiff();
        HollowDiffUIServer uiServer = new HollowDiffUIServer();
        uiServer.addDiff("diff", fakeDiff);
        uiServer.start();
        uiServer.stop();
    }

    @Test
    public void testBackwardsCompatibiltyWithJettyImplementation() throws Exception {
        // The legacy jetty-package entry point must keep working as well.
        HollowDiff fakeDiff = new FakeHollowDiffGenerator().createFakeDiff();
        com.netflix.hollow.diff.ui.jetty.HollowDiffUIServer uiServer =
                new com.netflix.hollow.diff.ui.jetty.HollowDiffUIServer();
        uiServer.addDiff("diff", fakeDiff);
        uiServer.start();
        uiServer.stop();
    }
}
| 9,445 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/HollowHistoryUITest.java
|
package com.netflix.hollow.diffview;
import static com.netflix.hollow.diffview.FakeHollowHistoryUtil.assertUiParity;
import static org.junit.Assert.assertNotNull;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.jetty.HollowHistoryUIServer;
import com.netflix.hollow.test.consumer.TestBlobRetriever;
import com.netflix.hollow.test.consumer.TestHollowConsumer;
import org.junit.Before;
import org.junit.Test;
public class HollowHistoryUITest {
private final int PORT_EXPECTED = 7777;
private final int PORT_ACTUAL = 7778;
private TestBlobRetriever testBlobRetriever;
private TestHollowConsumer consumerExpected; // builds history using only deltas
private TestHollowConsumer consumerFwd;
private TestHollowConsumer consumerRev;
private HollowHistoryUIServer historyUIServerExpected;
private HollowHistoryUIServer historyUIServerActual;
private HollowHistoryUI historyUiExpected; // built using fwd deltas application only
public HollowHistoryUITest() throws Exception {
testBlobRetriever = new TestBlobRetriever();
FakeHollowHistoryUtil.createDeltaChain(testBlobRetriever);
}
@Before
public void init() {
consumerExpected = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
consumerExpected.triggerRefreshTo(1);
historyUIServerExpected = new HollowHistoryUIServer(consumerExpected, PORT_EXPECTED);
consumerExpected.triggerRefreshTo(2);
consumerExpected.triggerRefreshTo(3);
consumerExpected.triggerRefreshTo(4);
consumerExpected.triggerRefreshTo(5);
historyUiExpected = historyUIServerExpected.getUI();
consumerFwd = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
consumerRev = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
}
@Test
public void historyUsingOnlyFwdConsumer() throws Exception {
consumerFwd.triggerRefreshTo(1); // snapshot
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, PORT_ACTUAL);
consumerFwd.triggerRefreshTo(2); // delta
consumerFwd.triggerRefreshTo(3); // delta
consumerFwd.triggerRefreshTo(4); // delta
consumerFwd.triggerRefreshTo(5); // delta
hostUisIfPairtyCheckFails();
}
@Test
public void historyUsingFwdAndRevConsumer_onlyRevDeltasApplied() throws Exception {
consumerFwd.triggerRefreshTo(5);
consumerRev.triggerRefreshTo(5);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(2);
consumerRev.triggerRefreshTo(1);
hostUisIfPairtyCheckFails();
}
@Test
public void historyUsingFwdAndRevConsumer_bothFwdAndRevDeltasApplied_FwdFirst() throws Exception {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(5);
consumerRev.triggerRefreshTo(1);
hostUisIfPairtyCheckFails();
}
@Test
public void historyUsingFwdAndRevConsumer_traversingStatesAlreadyVisited() throws Exception {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(5);
consumerFwd.triggerRefreshTo(4);
consumerFwd.triggerRefreshTo(3);
consumerFwd.triggerRefreshTo(4);
consumerFwd.triggerRefreshTo(5);
consumerRev.triggerRefreshTo(1);
consumerExpected.triggerRefreshTo(4);
consumerExpected.triggerRefreshTo(3);
consumerExpected.triggerRefreshTo(4);
consumerExpected.triggerRefreshTo(5);
hostUisIfPairtyCheckFails();
}
@Test
public void historyUsingFwdAndRevConsumer_bothFwdAndRevDeltasApplied_RevFirst() throws Exception {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(1);
consumerFwd.triggerRefreshTo(5);
hostUisIfPairtyCheckFails();
}
@Test
public void historyUsingFwdAndRevConsumer_doubleSnapshotInFwd() throws Exception {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(1);
consumerFwd.triggerRefreshTo(5);
consumerFwd.triggerRefreshTo(6); // double snapshot on fwd consumer in bidirectional history (supported)
consumerExpected.triggerRefreshTo(6); // double snapshot for fwd-only history (supported)
hostUisIfPairtyCheckFails();
}
@Test(expected = UnsupportedOperationException.class)
public void historyUsingFwdAndRevConsumer_doubleSnapshotInRev() {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(1);
consumerFwd.triggerRefreshTo(5);
consumerRev.triggerRefreshTo(0); // double snapshot in rev direction (not supported)
}
@Test
public void historyUsingFwdAndRevConsumer_backwardsCompatbileSchemaChange() throws Exception {
consumerFwd.triggerRefreshTo(7); // version in whcih actor type was introduced
consumerRev.triggerRefreshTo(7);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(6);
assertNotNull(historyUIServerActual.getUI().getHistory().getHistoricalState(7).getDataAccess()
.getTypeDataAccess("Actor").getDataAccess());
}
@Test
public void historyUsingFwdAndRevConsumer_removeOldestState() throws Exception {
consumerFwd.triggerRefreshTo(3);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
consumerRev.triggerRefreshTo(2);
consumerFwd.triggerRefreshTo(4);
consumerRev.triggerRefreshTo(1);
consumerFwd.triggerRefreshTo(5);
// drop 1 state
historyUIServerActual.getUI().getHistory().removeHistoricalStates(1);
// expected history is built for versions 2 through 5
consumerExpected = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
consumerExpected.triggerRefreshTo(2);
historyUIServerExpected = new HollowHistoryUIServer(consumerExpected, PORT_EXPECTED);
consumerExpected.triggerRefreshTo(3);
consumerExpected.triggerRefreshTo(4);
consumerExpected.triggerRefreshTo(5);
historyUiExpected = historyUIServerExpected.getUI();
hostUisIfPairtyCheckFails();
}
@Test(expected=NullPointerException.class)
public void historyUsingFwdAndRevConsumer_noPastVersionsAvailableAtInit() {
// consumerFwd and consumerRev haven't incurred snapshot load yet
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
}
@Test(expected=NullPointerException.class)
public void historyUsingFwdOnly_noPastVersionsAvailableAtInit() {
// consumerFwd hasn't incurred snapshot load yet
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, PORT_ACTUAL);
}
@Test(expected=UnsupportedOperationException.class)
public void historyUsingFwdAndRevConsumer_revConsumerMustBeInitialized() throws Exception {
TestHollowConsumer consumerFwd = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
TestHollowConsumer consumerRev = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
consumerFwd.triggerRefreshTo(5);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
// snapshots are only supported in the fwd direction, rev consumer should have been initialized
consumerRev.triggerRefreshTo(5);
}
@Test(expected=IllegalStateException.class)
public void historyUsingFwdAndRevConsumer_revAndFwdConsumersMustBeOnSameVersionAtInit() {
TestHollowConsumer consumerFwd = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
TestHollowConsumer consumerRev = new TestHollowConsumer.Builder()
.withBlobRetriever(testBlobRetriever)
.build();
consumerFwd.triggerRefreshTo(5);
consumerRev.triggerRefreshTo(3);
historyUIServerActual = new HollowHistoryUIServer(consumerFwd, consumerRev, PORT_ACTUAL);
}
private void hostUisIfPairtyCheckFails() throws Exception {
try {
assertUiParity(historyUiExpected, historyUIServerActual.getUI());
} catch (AssertionError | Exception e) {
System.out.println(String.format("Error when comparing expected and actual history UIs for parity. " +
"Expected and actual history UIs are hosted at ports %s and %s respectively. " +
"Be sure to open in different browsers for isolated sessions state stored in cookie which " +
"could affect the links generated in the output html",
PORT_EXPECTED, PORT_ACTUAL));
e.printStackTrace();
historyUIServerExpected.start();
historyUIServerActual.start();
historyUIServerActual.join();
}
}
}
| 9,446 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/HollowEffigyDiffRecordTest.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.HollowEffigy;
import com.netflix.hollow.diffview.effigy.HollowEffigyFactory;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyDiffRecord;
import com.netflix.hollow.tools.diff.HollowDiff;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
public class HollowEffigyDiffRecordTest {

    @Test
    public void test() throws IOException {
        HollowDiff fakeDiff = new FakeHollowDiffGenerator().createFakeDiff();

        // Build effigies for the ordinal-0 "TypeA" record on each side of the diff.
        HollowEffigyFactory factory = new HollowEffigyFactory();
        HollowEffigy from = factory.effigy(fakeDiff.getFromStateEngine(), "TypeA", 0);
        HollowEffigy to = factory.effigy(fakeDiff.getToStateEngine(), "TypeA", 0);

        // The paired records in the fake diff differ by a score of exactly 8.
        HollowEffigyDiffRecord record = new HollowEffigyDiffRecord(from);
        Assert.assertEquals(8, record.calculateDiff(to, 8));
    }
}
| 9,447 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/FakeHollowDiffGenerator.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.engine.HollowBlobReader;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.write.HollowBlobWriter;
import com.netflix.hollow.core.write.HollowListTypeWriteState;
import com.netflix.hollow.core.write.HollowListWriteRecord;
import com.netflix.hollow.core.write.HollowMapTypeWriteState;
import com.netflix.hollow.core.write.HollowMapWriteRecord;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowSetTypeWriteState;
import com.netflix.hollow.core.write.HollowSetWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
 * Builds a small, fully in-memory {@link HollowDiff} fixture for the diff-view tests.
 * The data model is: TypeA -&gt; TypeB -&gt; (ListOfTypeC, SetOfTypeD) and
 * TypeA -&gt; MapOfTypeCToTypeD. Two record pairs ("recordOne"/"recordTwo") carry
 * deliberate field-level differences, and one record on each side is left unpaired.
 *
 * NOTE(review): the exact order in which records are added fixes the ordinals each
 * side assigns; HollowEffigyDiffRecordTest asserts against that layout — do not reorder.
 */
public class FakeHollowDiffGenerator {
// Schemas are (re)built by setUp() at the start of every createFakeDiff() call.
HollowObjectSchema typeASchema;
HollowObjectSchema typeBSchema;
HollowObjectSchema typeCSchema;
HollowObjectSchema typeDSchema;
HollowListSchema listOfTypeCSchema;
HollowSetSchema setOfTypeDSchema;
HollowMapSchema mapOfTypeCToTypeDSchema;
/** Defines all object/list/set/map schemas used by both sides of the diff. */
private void setUp() {
typeASchema = new HollowObjectSchema("TypeA", 3);
typeASchema.addField("a1", FieldType.STRING);
typeASchema.addField("a2", FieldType.REFERENCE, "TypeB");
typeASchema.addField("a3", FieldType.REFERENCE, "MapOfTypeCToTypeD");
typeBSchema = new HollowObjectSchema("TypeB", 3);
typeBSchema.addField("b1", FieldType.INT);
typeBSchema.addField("b2", FieldType.REFERENCE, "ListOfTypeC");
typeBSchema.addField("b3", FieldType.REFERENCE, "SetOfTypeD");
typeCSchema = new HollowObjectSchema("TypeC", 2);
typeCSchema.addField("c1", FieldType.LONG);
typeCSchema.addField("c2", FieldType.BOOLEAN);
typeDSchema = new HollowObjectSchema("TypeD", 3);
typeDSchema.addField("d1", FieldType.FLOAT);
typeDSchema.addField("d2", FieldType.DOUBLE);
typeDSchema.addField("d3", FieldType.BYTES);
listOfTypeCSchema = new HollowListSchema("ListOfTypeC", "TypeC");
setOfTypeDSchema = new HollowSetSchema("SetOfTypeD", "TypeD");
mapOfTypeCToTypeDSchema = new HollowMapSchema("MapOfTypeCToTypeD", "TypeC", "TypeD");
}
/**
 * Populates a "from" and a "to" state engine with the fixture records, serializes
 * both to snapshots, and returns a calculated {@link HollowDiff} matching TypeA
 * records on (a1, a2.b1).
 *
 * @throws IOException if snapshot serialization or deserialization fails
 */
public HollowDiff createFakeDiff() throws IOException {
setUp();
HollowWriteStateEngine fromStateEngine = newWriteStateEngine();
HollowWriteStateEngine toStateEngine = newWriteStateEngine();
//// FIRST OBJECT PAIR ////
addRec(fromStateEngine,
"recordOne", 1,
cList(
c(1001, true),
c(1002, true),
c(1003, true)
),
dSet(
d(1.001f, 1.00001d, new byte[]{ 1, 1 }),
d(1.002f, 1.00002d, new byte[]{ 1, 2 })
),
map(
entry(
c(1001, true),
d(1.001f, 1.00001d, new byte[]{ 1, 1 })
),
entry(
c(1002, true),
d(1.002f, 1.00002d, new byte[]{ 1, 2 })
)
));
addRec(toStateEngine,
"recordOne", 1,
cList(
c(1001, false), // now false instead of true
c(1002, true),
c(1003, false) // now false instead of true
),
dSet(
d(1.001f, 1.00001d, new byte[]{ 1, 9 }), /// 9 instead of 1
d(1.002f, 1.00002d, new byte[]{ 1, 2 })
),
map(
entry(
c(1001, true),
d(1.001f, 1.00001d, new byte[]{ 1, 9 }) /// 9 instead of 1
),
entry(
c(1002, true),
d(1.002f, 1.00002d, new byte[]{ 1, 2 })
)
));
//// SECOND OBJECT PAIR ////
addRec(fromStateEngine,
"recordTwo", 2,
cList(
c(2001, true),
c(2002, true),
c(2003, true)
),
dSet(
d(2.001f, 2.00001d, new byte[]{ 2, 1 }),
d(2.002f, 2.00002d, new byte[]{ 2, 2 })
),
map(
entry(
c(2001, true),
d(2.001f, 2.00001d, new byte[]{ 2, 1 })
),
entry(
c(2002, true),
d(2.002f, 2.00002d, new byte[]{ 2, 2 })
)
));
addRec(toStateEngine,
"recordTwo", 2,
cList(
c(2001, true),
c(2002, false), // now false instead of true
c(2003, true)
),
dSet(
d(2.001f, 2.00001d, new byte[]{ 2, 7 }), /// 7 instead of 1
d(2.002f, 2.00002d, new byte[]{ 2, 2 })
),
map(
entry(
c(2001, true),
d(2.001f, 2.00001d, new byte[]{ 2, 7 }) /// 7 instead of 1
),
entry(
c(2002, true),
d(2.002f, 2.00002d, new byte[]{ 2, 2 })
)
));
//// UNPAIRED OBJECTS ////
// same a1 ("recordThree") but different b1, so the (a1, a2.b1) match paths differ
addRec(toStateEngine,
"recordThree", 3,
cList(), dSet(), map());
addRec(fromStateEngine,
"recordThree", 4,
cList(), dSet(), map());
// header tags: one shared, one with differing values, one unique to each side
fromStateEngine.addHeaderTag("tag1", "val1");
fromStateEngine.addHeaderTag("tag2", "fromVal2");
fromStateEngine.addHeaderTag("fromTag", "fromTagVal");
toStateEngine.addHeaderTag("tag1", "val1");
toStateEngine.addHeaderTag("tag2", "toVal2");
toStateEngine.addHeaderTag("toTag", "toTagVal");
HollowDiff diff = new HollowDiff(readEngine(fromStateEngine), readEngine(toStateEngine));
HollowTypeDiff typeDiff = diff.addTypeDiff("TypeA");
typeDiff.addMatchPath("a1");
typeDiff.addMatchPath("a2.b1");
diff.calculateDiffs();
return diff;
}
/** Round-trips a write engine through a serialized snapshot into a read engine. */
private HollowReadStateEngine readEngine(HollowWriteStateEngine writeEngine) throws IOException {
HollowBlobWriter writer = new HollowBlobWriter(writeEngine);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
writer.writeSnapshot(baos);
HollowReadStateEngine readEngine = new HollowReadStateEngine(true);
HollowBlobReader reader = new HollowBlobReader(readEngine);
reader.readSnapshot(HollowBlobInput.serial(baos.toByteArray()));
return readEngine;
}
/** Creates a write engine with a type state registered for every schema. */
private HollowWriteStateEngine newWriteStateEngine() {
HollowWriteStateEngine stateEngine = new HollowWriteStateEngine();
stateEngine.addTypeState(new HollowObjectTypeWriteState(typeASchema));
stateEngine.addTypeState(new HollowObjectTypeWriteState(typeBSchema));
stateEngine.addTypeState(new HollowObjectTypeWriteState(typeCSchema));
stateEngine.addTypeState(new HollowObjectTypeWriteState(typeDSchema));
stateEngine.addTypeState(new HollowListTypeWriteState(listOfTypeCSchema));
stateEngine.addTypeState(new HollowSetTypeWriteState(setOfTypeDSchema));
stateEngine.addTypeState(new HollowMapTypeWriteState(mapOfTypeCToTypeDSchema));
return stateEngine;
}
/**
 * Adds one full TypeA record graph (TypeA, TypeB, list, set, map and their
 * elements) to the given engine, returning the TypeA ordinal.
 */
private int addRec(HollowWriteStateEngine stateEngine,
String a1, int b1,
TypeCRec[] typeCs,
TypeDRec[] typeDs,
MapEntry[] mapEntries) {
int listOrdinal = addListRec(stateEngine, typeCs);
int setOrdinal = addSetRec(stateEngine, typeDs);
int bOrdinal = addBRec(stateEngine, b1, listOrdinal, setOrdinal);
int mapOrdinal = addMapRec(stateEngine, mapEntries);
return addARec(stateEngine, a1, bOrdinal, mapOrdinal);
}
/** Adds a TypeA record referencing the given TypeB and map ordinals. */
private int addARec(HollowWriteStateEngine stateEngine, String a1, int bOrdinal, int mapOrdinal) {
HollowObjectWriteRecord aRec = new HollowObjectWriteRecord(typeASchema);
aRec.setString("a1", a1);
aRec.setReference("a2", bOrdinal);
aRec.setReference("a3", mapOrdinal);
return stateEngine.add("TypeA", aRec);
}
/** Adds the map entries' key/value records, then the map record itself. */
private int addMapRec(HollowWriteStateEngine stateEngine, MapEntry[] mapEntries) {
HollowMapWriteRecord mapRec = new HollowMapWriteRecord();
for(MapEntry entry : mapEntries) {
int cOrdinal = addCRec(stateEngine, entry.key);
int dOrdinal = addDRec(stateEngine, entry.value);
mapRec.addEntry(cOrdinal, dOrdinal);
}
int mapOrdinal = stateEngine.add(mapOfTypeCToTypeDSchema.getName(), mapRec);
return mapOrdinal;
}
/** Adds a TypeB record referencing the given list and set ordinals. */
private int addBRec(HollowWriteStateEngine stateEngine, int b1, int listOrdinal, int setOrdinal) {
HollowObjectWriteRecord bRec = new HollowObjectWriteRecord(typeBSchema);
bRec.setInt("b1", b1);
bRec.setReference("b2", listOrdinal);
bRec.setReference("b3", setOrdinal);
int bOrdinal = stateEngine.add("TypeB", bRec);
return bOrdinal;
}
/** Adds each TypeC element, then the ListOfTypeC record containing them. */
private int addListRec(HollowWriteStateEngine stateEngine, TypeCRec[] typeCs) {
HollowListWriteRecord listRec = new HollowListWriteRecord();
for(TypeCRec typeC : typeCs) {
listRec.addElement(addCRec(stateEngine, typeC));
}
int listOrdinal = stateEngine.add(listOfTypeCSchema.getName(), listRec);
return listOrdinal;
}
/** Adds each TypeD element, then the SetOfTypeD record containing them. */
private int addSetRec(HollowWriteStateEngine stateEngine, TypeDRec[] typeDs) {
HollowSetWriteRecord setRec = new HollowSetWriteRecord();
for(TypeDRec typeD : typeDs) {
setRec.addElement(addDRec(stateEngine, typeD));
}
int setOrdinal = stateEngine.add(setOfTypeDSchema.getName(), setRec);
return setOrdinal;
}
/** Adds a single TypeC record. */
private int addCRec(HollowWriteStateEngine stateEngine, TypeCRec typeC) {
HollowObjectWriteRecord rec = new HollowObjectWriteRecord(typeCSchema);
rec.setLong("c1", typeC.c1);
rec.setBoolean("c2", typeC.c2);
return stateEngine.add("TypeC", rec);
}
/** Adds a single TypeD record. */
private int addDRec(HollowWriteStateEngine stateEngine, TypeDRec typeD) {
HollowObjectWriteRecord rec = new HollowObjectWriteRecord(typeDSchema);
rec.setFloat("d1", typeD.d1);
rec.setDouble("d2", typeD.d2);
rec.setBytes("d3", typeD.d3);
return stateEngine.add("TypeD", rec);
}
// The varargs helpers below exist purely to make the fixture data read declaratively.
private TypeCRec[] cList(TypeCRec... cs) {
return cs;
}
private TypeDRec[] dSet(TypeDRec... ds) {
return ds;
}
private MapEntry[] map(MapEntry... entries) {
return entries;
}
private MapEntry entry(TypeCRec c, TypeDRec d) {
MapEntry entry = new MapEntry();
entry.key = c;
entry.value = d;
return entry;
}
private TypeCRec c(long c1, boolean c2) {
TypeCRec rec = new TypeCRec();
rec.c1 = c1;
rec.c2 = c2;
return rec;
}
private TypeDRec d(float d1, double d2, byte[] d3) {
TypeDRec rec = new TypeDRec();
rec.d1 = d1;
rec.d2 = d2;
rec.d3 = d3;
return rec;
}
// Plain value holders describing the records to be written.
private static class MapEntry {
TypeCRec key;
TypeDRec value;
}
private static class TypeCRec {
private long c1;
private boolean c2;
}
private static class TypeDRec {
private float d1;
private double d2;
private byte[] d3;
}
}
| 9,448 |
0 |
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/test/java/com/netflix/hollow/diffview/FakeHollowHistoryUtil.java
|
package com.netflix.hollow.diffview;
import static org.junit.Assert.assertEquals;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.write.HollowBlobWriter;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.diffview.effigy.HollowEffigy;
import com.netflix.hollow.diffview.effigy.HollowEffigyFactory;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.model.HistoryStateTypeChanges;
import com.netflix.hollow.history.ui.model.RecordDiff;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.test.consumer.TestBlob;
import com.netflix.hollow.test.consumer.TestBlobRetriever;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.HollowHistory;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Utility to help create a delta chain (with reverse deltas) containing some history UI friendly fake data,
* and utilities to help compare outputs of HollowHistoryUIs for parity.
*/
/**
 * Utility to help create a delta chain (with reverse deltas) containing some history UI friendly fake data,
 * and utilities to help compare outputs of HollowHistoryUIs for parity.
 */
public class FakeHollowHistoryUtil {
    private static final String CUSTOM_VERSION_TAG = "myVersion";

    /**
     * Builds a fake "Movie" delta chain and registers every blob with the supplied retriever:
     * versions 1-5 each get a snapshot plus forward/reverse deltas, v6 is snapshot-only (to
     * force a double snapshot), v7 introduces a schema change (new "Actor" type), and a
     * standalone snapshot-only v0 exists to test that a double snapshot cannot be applied in
     * the reverse direction.
     *
     * @param testBlobRetriever receives all produced snapshot/delta/reverse-delta blobs
     * @throws IOException if serializing any blob fails
     */
    public static void createDeltaChain(TestBlobRetriever testBlobRetriever) throws IOException {
        HollowObjectSchema movieSchema = new HollowObjectSchema("Movie", 2, "id");
        movieSchema.addField("id", HollowObjectSchema.FieldType.INT);
        movieSchema.addField("name", HollowObjectSchema.FieldType.STRING);
        HollowWriteStateEngine stateEngine = new HollowWriteStateEngine();
        stateEngine.addTypeState(new HollowObjectTypeWriteState(movieSchema));

        // v1
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v1");
        addMovie(stateEngine, 1, "movie1-added-in-v1");
        addMovie(stateEngine, 2, "movie2-added-in-v1");
        addMovie(stateEngine, 3, "movie3-added-in-v1-removed-in-v2");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v1 = new ByteArrayOutputStream();
        HollowBlobWriter writer = new HollowBlobWriter(stateEngine);
        writer.writeSnapshot(baos_v1);
        testBlobRetriever.addSnapshot(1, new TestBlob(1,new ByteArrayInputStream(baos_v1.toByteArray())));

        // v2
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v2");
        addMovie(stateEngine, 1, "movie1-added-in-v1");
        addMovie(stateEngine, 2, "movie2-added-in-v1-modified-in-v2-removed-in-v5");
        addMovie(stateEngine, 4, "movie4-added-in-v2");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v1_to_v2 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v2_to_v1 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v2 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v2);
        writer.writeDelta(baos_v1_to_v2);
        writer.writeReverseDelta(baos_v2_to_v1);
        testBlobRetriever.addSnapshot(2, new TestBlob(2,new ByteArrayInputStream(baos_v2.toByteArray())));
        testBlobRetriever.addDelta(1, new TestBlob(1, 2, new ByteArrayInputStream(baos_v1_to_v2.toByteArray())));
        testBlobRetriever.addReverseDelta(2, new TestBlob(2, 1, new ByteArrayInputStream(baos_v2_to_v1.toByteArray())));

        // v3
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v3");
        addMovie(stateEngine, 1, "movie1-added-in-v1-modified-in-v3-removed-in-v4");
        addMovie(stateEngine, 2, "movie2-added-in-v1-modified-in-v2-removed-in-v5");
        addMovie(stateEngine, 4, "movie4-added-in-v2");
        addMovie(stateEngine, 5, "movie5-added-in-v3-removed-in-v5");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v2_to_v3 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v3_to_v2 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v3 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v3);
        writer.writeDelta(baos_v2_to_v3);
        writer.writeReverseDelta(baos_v3_to_v2);
        testBlobRetriever.addSnapshot(3, new TestBlob(3,new ByteArrayInputStream(baos_v3.toByteArray())));
        testBlobRetriever.addDelta(2, new TestBlob(2, 3, new ByteArrayInputStream(baos_v2_to_v3.toByteArray())));
        testBlobRetriever.addReverseDelta(3, new TestBlob(3, 2, new ByteArrayInputStream(baos_v3_to_v2.toByteArray())));

        // v4
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v4");
        addMovie(stateEngine, 2, "movie2-added-in-v1-modified-in-v2-removed-in-v5");
        addMovie(stateEngine, 4, "movie4-added-in-v2-modified-in-v4");
        addMovie(stateEngine, 5, "movie5-added-in-v3-removed-in-v5");
        addMovie(stateEngine, 6, "movie6-added-in-v4");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v4 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v3_to_v4 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v4_to_v3 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v4);
        writer.writeDelta(baos_v3_to_v4);
        writer.writeReverseDelta(baos_v4_to_v3);
        testBlobRetriever.addSnapshot(4, new TestBlob(4,new ByteArrayInputStream(baos_v4.toByteArray())));
        testBlobRetriever.addDelta(3, new TestBlob(3, 4, new ByteArrayInputStream(baos_v3_to_v4.toByteArray())));
        testBlobRetriever.addReverseDelta(4, new TestBlob(4, 3, new ByteArrayInputStream(baos_v4_to_v3.toByteArray())));

        // v5
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v5");
        addMovie(stateEngine, 4, "movie4-added-in-v2-modified-in-v4");
        addMovie(stateEngine, 6, "movie6-added-in-v4-modified-in-v5");
        addMovie(stateEngine, 7, "movie7-added-in-v5-removed-in-v6");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v5 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v4_to_v5 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v5_to_v4 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v5);
        writer.writeDelta(baos_v4_to_v5);
        writer.writeReverseDelta(baos_v5_to_v4);
        testBlobRetriever.addSnapshot(5, new TestBlob(5,new ByteArrayInputStream(baos_v5.toByteArray())));
        testBlobRetriever.addDelta(4, new TestBlob(4, 5, new ByteArrayInputStream(baos_v4_to_v5.toByteArray())));
        testBlobRetriever.addReverseDelta(5, new TestBlob(5, 4, new ByteArrayInputStream(baos_v5_to_v4.toByteArray())));

        // v6 - only snapshot artifact, also contains new type in schema- to test double snapshots
        stateEngine.prepareForNextCycle();
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v6");
        addMovie(stateEngine, 4, "movie4-added-in-v2-modified-in-v4-also-modified-in-v6");
        addMovie(stateEngine, 8, "movie8-added-in-v6");
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v6 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v6);
        testBlobRetriever.addSnapshot(6, new TestBlob(6,new ByteArrayInputStream(baos_v6.toByteArray())));

        // v7 - introduces schema change
        stateEngine.prepareForNextCycle();
        HollowObjectSchema actorSchema = new HollowObjectSchema("Actor", 1, "id");
        actorSchema.addField("id", HollowObjectSchema.FieldType.INT);
        stateEngine.addTypeState(new HollowObjectTypeWriteState(actorSchema));
        stateEngine.addHeaderTag(CUSTOM_VERSION_TAG, "v7");
        addMovie(stateEngine, 4, "movie4-added-in-v2-modified-in-v4-also-modified-in-v6");
        addActor(stateEngine, 1);
        stateEngine.prepareForWrite();
        ByteArrayOutputStream baos_v7 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v6_to_v7 = new ByteArrayOutputStream();
        ByteArrayOutputStream baos_v7_to_v6 = new ByteArrayOutputStream();
        writer.writeSnapshot(baos_v7);
        writer.writeDelta(baos_v6_to_v7);
        writer.writeReverseDelta(baos_v7_to_v6);
        testBlobRetriever.addSnapshot(7, new TestBlob(7, new ByteArrayInputStream(baos_v7.toByteArray())));
        testBlobRetriever.addDelta(6, new TestBlob(6, 7, new ByteArrayInputStream(baos_v6_to_v7.toByteArray())));
        testBlobRetriever.addReverseDelta(7, new TestBlob(7, 6, new ByteArrayInputStream(baos_v7_to_v6.toByteArray())));

        // v0 - snapshot only - just to test that double snapshot can not be applied in reverse direction
        HollowWriteStateEngine stateEngineV0 = new HollowWriteStateEngine();
        stateEngineV0.addTypeState(new HollowObjectTypeWriteState(movieSchema));
        addMovie(stateEngineV0, 0, "movie0-never-shows-up-in-ui");
        stateEngineV0.prepareForWrite();
        ByteArrayOutputStream baos_v0 = new ByteArrayOutputStream();
        // BUGFIX: the v0 snapshot must be produced from stateEngineV0. The previous code reused
        // `writer`, which is bound to the main stateEngine (at v7 by this point), so the v0 blob
        // contained v7 data and stateEngineV0 was never actually serialized.
        HollowBlobWriter writerV0 = new HollowBlobWriter(stateEngineV0);
        writerV0.writeSnapshot(baos_v0);
        testBlobRetriever.addSnapshot(0, new TestBlob(0,new ByteArrayInputStream(baos_v0.toByteArray())));
    }

    /**
     * Asserts deep parity between two history UIs: same historical state chain (ordering via
     * prev/next version links), same header tags, same per-type added/removed/modified record
     * counts, and equal record effigies for each change set.
     */
    public static void assertUiParity(HollowHistoryUI hui1, HollowHistoryUI hui2) {
        HollowHistory h1 = hui1.getHistory();
        HollowHistory h2 = hui2.getHistory();
        List<RecordDiff> addedDiffs1, addedDiffs2, removedDiffs1, removedDiffs2, modifiedDiffs1, modifiedDiffs2;
        HollowHistoricalState state1, state2;

        //OverviewPage
        assertEquals("Should have same number of Historical States", h1.getHistoricalStates().length, h2.getHistoricalStates().length);
        for (int j = 0; j < h1.getHistoricalStates().length; j++) {
            state1 = h1.getHistoricalStates()[j];
            state2 = h2.getHistoricalStates()[j];

            // make sure traversal is in the right order
            assertEquals("Prev state should be the same", getPreviousStateVersion(state1, h1), getPreviousStateVersion(state2, h2));
            assertEquals("Next state should be the same", getNextStateVersion(state1), getNextStateVersion(state2));

            assertEquals("Same size of type mappings for historical state", state1.getKeyOrdinalMapping().getTypeMappings().size(), state2.getKeyOrdinalMapping().getTypeMappings().size());
            assertEquals("Not same key set of type mappings for historical state", state1.getKeyOrdinalMapping().getTypeMappings().keySet(), state2.getKeyOrdinalMapping().getTypeMappings().keySet());

            Map<String, String> headerTags1 = state1.getHeaderEntries();
            Map<String, String> headerTags2 = state2.getHeaderEntries();
            assertEquals(headerTags1, headerTags2);

            for (String key : state2.getKeyOrdinalMapping().getTypeMappings().keySet()) {
                HollowHistoricalStateTypeKeyOrdinalMapping typeKeyMapping1 = state1.getKeyOrdinalMapping().getTypeMappings().get(key);
                HollowHistoricalStateTypeKeyOrdinalMapping typeKeyMapping2 = state2.getKeyOrdinalMapping().getTypeMappings().get(key);

                assertEquals("No. of added records", typeKeyMapping1.getNumberOfNewRecords(), typeKeyMapping2.getNumberOfNewRecords());
                assertEquals("No. of removed records", typeKeyMapping1.getNumberOfRemovedRecords(), typeKeyMapping2.getNumberOfRemovedRecords());
                assertEquals("No. of modified records", typeKeyMapping1.getNumberOfModifiedRecords(), typeKeyMapping2.getNumberOfModifiedRecords());

                HistoryStateTypeChanges typeChanges1 = new HistoryStateTypeChanges(state1, key, HollowHistoryRecordNamer.DEFAULT_RECORD_NAMER, new String[0]);
                HistoryStateTypeChanges typeChanges2 = new HistoryStateTypeChanges(state2, key, HollowHistoryRecordNamer.DEFAULT_RECORD_NAMER, new String[0]);

                addedDiffs1 = typeChanges1.getAddedRecords().getRecordDiffs();
                addedDiffs2 = typeChanges2.getAddedRecords().getRecordDiffs();
                removedDiffs1 = typeChanges1.getRemovedRecords().getRecordDiffs();
                removedDiffs2 = typeChanges2.getRemovedRecords().getRecordDiffs();
                modifiedDiffs1 = typeChanges1.getModifiedRecords().getRecordDiffs();
                modifiedDiffs2 = typeChanges2.getModifiedRecords().getRecordDiffs();

                assertEquals("Add Diffs size", addedDiffs1.size(), addedDiffs2.size());
                assertEquals("Remove Diffs size", removedDiffs1.size(), removedDiffs2.size());
                assertEquals("Modified Diffs size", modifiedDiffs1.size(), modifiedDiffs2.size());
                assertEquals("Added subgroups (if any)", typeChanges1.getAddedRecords().hasSubGroups(), typeChanges2.getAddedRecords().hasSubGroups());
                assertEquals("Removed subgroups (if any)", typeChanges1.getRemovedRecords().hasSubGroups(), typeChanges2.getRemovedRecords().hasSubGroups());
                assertEquals("Added subgroups (if any)", typeChanges1.getModifiedRecords().hasSubGroups(), typeChanges2.getModifiedRecords().hasSubGroups());

                // compare the actual record contents via effigies, not just the counts
                HollowEffigyFactory effigyFactory = new HollowEffigyFactory();
                Set<HollowEffigy> addedEffigies1 = new HashSet<>();
                Set<HollowEffigy> addedEffigies2 = new HashSet<>();
                if (!typeChanges1.getAddedRecords().isEmpty()) {
                    addedEffigies1 = toEffigies(addedDiffs1, effigyFactory, state1);
                    addedEffigies2 = toEffigies(addedDiffs2, effigyFactory, state2);
                }
                assertEquals(addedEffigies1, addedEffigies2);

                Set<HollowEffigy> modifiedFromEffigies1 = new HashSet<>();
                Set<HollowEffigy> modifiedToEffigies1 = new HashSet<>();
                Set<HollowEffigy> modifiedFromEffigies2 = new HashSet<>();
                Set<HollowEffigy> modifiedToEffigies2 = new HashSet<>();
                if (!typeChanges1.getModifiedRecords().isEmpty()) {
                    modifiedFromEffigies1 = fromEffigies(modifiedDiffs1, effigyFactory, state1);
                    modifiedFromEffigies2 = fromEffigies(modifiedDiffs2, effigyFactory, state2);
                    modifiedToEffigies1 = toEffigies(modifiedDiffs1, effigyFactory, state1);
                    modifiedToEffigies2 = toEffigies(modifiedDiffs2, effigyFactory, state2);
                }
                assertEquals(modifiedFromEffigies1, modifiedFromEffigies2);
                assertEquals(modifiedToEffigies1, modifiedToEffigies2);

                Set<HollowEffigy> removedEffigies1 = new HashSet<>();
                Set<HollowEffigy> removedEffigies2 = new HashSet<>();
                if (!typeChanges1.getRemovedRecords().isEmpty()) {
                    removedEffigies1 = fromEffigies(removedDiffs1, effigyFactory, state1);
                    removedEffigies2 = fromEffigies(removedDiffs2, effigyFactory, state2);
                }
                assertEquals(removedEffigies1, removedEffigies2);
            }
        }
    }

    /**
     * Builds effigies of the "from" side of each record diff.
     * NOTE(review): the type name is hard-coded to "Movie"; this helper won't describe records
     * of other types (e.g. "Actor") — appears intentional for this fixture, confirm if reused.
     */
    private static Set<HollowEffigy> fromEffigies(List<RecordDiff> recordDiffs, HollowEffigyFactory effigyFactory, HollowHistoricalState historicalState) {
        Set<HollowEffigy> fromEffigies = new HashSet<>();
        for (int i = 0; i < recordDiffs.size(); i++) {
            RecordDiff recordDiff = recordDiffs.get(i);
            HollowEffigy fromEffigy = effigyFactory.effigy(historicalState.getDataAccess(),
                    "Movie", recordDiff.getFromOrdinal());
            fromEffigies.add(fromEffigy);
        }
        return fromEffigies;
    }

    /** Builds effigies of the "to" side of each record diff (see note on {@code fromEffigies}). */
    private static Set<HollowEffigy> toEffigies(List<RecordDiff> recordDiffs, HollowEffigyFactory effigyFactory, HollowHistoricalState historicalState) {
        Set<HollowEffigy> toEffigies = new HashSet<>();
        for (int i = 0; i < recordDiffs.size(); i++) {
            RecordDiff recordDiff = recordDiffs.get(i);
            HollowEffigy toEffigy = effigyFactory.effigy(historicalState.getDataAccess(),
                    "Movie", recordDiff.getToOrdinal());
            toEffigies.add(toEffigy);
        }
        return toEffigies;
    }

    /** @return version of the next historical state, or -1 if this is the last state */
    private static long getNextStateVersion(HollowHistoricalState currentHistoricalState) {
        if (currentHistoricalState.getNextState() != null)
            return currentHistoricalState.getNextState().getVersion();
        return -1;
    }

    /** @return version of the state whose next-pointer is {@code currentHistoricalState}, or -1 if none */
    private static long getPreviousStateVersion(HollowHistoricalState currentHistoricalState, HollowHistory history) {
        for(HollowHistoricalState state : history.getHistoricalStates()) {
            if(state.getNextState() == currentHistoricalState) {
                return state.getVersion();
            }
        }
        return -1;
    }

    /** Adds a Movie record (id + name) to the given write state engine. */
    private static void addMovie(HollowWriteStateEngine stateEngine, int id, String name) {
        HollowObjectWriteRecord rec = new HollowObjectWriteRecord((HollowObjectSchema) stateEngine.getSchema("Movie"));
        rec.setInt("id", id);
        rec.setString("name", name);
        stateEngine.add("Movie", rec);
    }

    /** Adds an Actor record (id only) to the given write state engine. */
    private static void addActor(HollowWriteStateEngine stateEngine, int id) {
        HollowObjectWriteRecord rec = new HollowObjectWriteRecord((HollowObjectSchema) stateEngine.getSchema("Actor"));
        rec.setInt("id", id);
        stateEngine.add("Actor", rec);
    }
}
| 9,449 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/DiffUIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.ui.UIServer;
/**
 * A UI server that can host one or more {@link HollowDiffUI} instances, each mounted at
 * its own URL path.
 */
public interface DiffUIServer extends UIServer {
    /**
     * Registers a diff UI under the given path.
     *
     * @param diffPath     URL path segment at which the UI is served
     * @param diff         the computed diff to render
     * @param fromBlobName display label for the "from" state
     * @param toBlobName   display label for the "to" state
     * @return the newly created UI instance
     */
    HollowDiffUI addDiff(String diffPath, HollowDiff diff, String fromBlobName, String toBlobName);
}
| 9,450 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/HollowDiffUI.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui;
import static com.netflix.hollow.ui.HollowUISession.getSession;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.diff.ui.pages.DiffFieldPage;
import com.netflix.hollow.diff.ui.pages.DiffObjectPage;
import com.netflix.hollow.diff.ui.pages.DiffOverviewPage;
import com.netflix.hollow.diff.ui.pages.DiffPage;
import com.netflix.hollow.diff.ui.pages.DiffTypePage;
import com.netflix.hollow.diffview.DiffViewOutputGenerator;
import com.netflix.hollow.diffview.HollowDiffViewProvider;
import com.netflix.hollow.diffview.HollowObjectViewProvider;
import com.netflix.hollow.diffview.effigy.CustomHollowEffigyFactory;
import com.netflix.hollow.diffview.effigy.HollowRecordDiffUI;
import com.netflix.hollow.diffview.effigy.pairer.exact.DiffExactRecordMatcher;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
import com.netflix.hollow.tools.diff.HollowDiff;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.app.VelocityEngine;
/**
 * A single diff UI instance: renders the overview/type/field/object pages for one
 * {@link HollowDiff}, and serves the ajax endpoints used to expand/collapse diff rows.
 */
public class HollowDiffUI implements HollowRecordDiffUI {

    private final String baseURLPath;
    private final String diffUIPath;
    private final HollowDiff diff;
    private final VelocityEngine velocity;
    private final String fromBlobName;
    private final String toBlobName;

    private final DiffOverviewPage overviewPage;
    private final DiffTypePage typePage;
    private final DiffFieldPage fieldPage;
    private final DiffObjectPage objectPage;

    private final HollowObjectViewProvider viewProvider;
    private final DiffViewOutputGenerator diffViewOutputGenerator;

    private final Map<String, PrimaryKey> matchHints;
    private final Map<String, CustomHollowEffigyFactory> customHollowEffigyFactories;
    private final ExactRecordMatcher exactRecordMatcher;

    HollowDiffUI(String baseURLPath, String diffUIPath, HollowDiff diff, String fromBlobName, String toBlobName, VelocityEngine ve) {
        this.baseURLPath = baseURLPath;
        // an empty diffUIPath mounts this UI directly at the base URL path
        this.diffUIPath = (diffUIPath == null || diffUIPath.length() == 0) ? baseURLPath : baseURLPath + "/" + diffUIPath;
        this.diff = diff;
        this.velocity = ve;
        this.fromBlobName = fromBlobName;
        this.toBlobName = toBlobName;
        this.overviewPage = new DiffOverviewPage(this);
        this.typePage = new DiffTypePage(this);
        this.fieldPage = new DiffFieldPage(this);
        this.objectPage = new DiffObjectPage(this);
        this.viewProvider = new HollowDiffViewProvider(this);
        this.diffViewOutputGenerator = new DiffViewOutputGenerator(viewProvider);
        // diamond operator, consistent with the rest of the package
        this.customHollowEffigyFactories = new HashMap<>();
        this.matchHints = new HashMap<>();
        this.exactRecordMatcher = new DiffExactRecordMatcher(diff.getEqualityMapping());
    }

    /**
     * Dispatches a request for the given page name.
     *
     * @return true if this UI handled the request, false for an unrecognized page name
     * @throws IOException if writing the response fails
     */
    public boolean serveRequest(String pageName, HttpServletRequest req, HttpServletResponse resp) throws IOException {
        // ajax endpoints for expanding/collapsing diff rows: handled before the HTML pages
        if("diffrowdata".equals(pageName)) {
            diffViewOutputGenerator.uncollapseRow(req, resp);
            return true;
        } else if("collapsediffrow".equals(pageName)) {
            diffViewOutputGenerator.collapseRow(req, resp);
            return true;
        }

        // NOTE(review): the content type is set before we know whether the page name matches;
        // an unknown page returns false with the header already set — confirm callers tolerate this.
        resp.setContentType("text/html");

        if("".equals(pageName) || "overview".equals(pageName)) {
            render(overviewPage, req, resp);
        } else if("typediff".equals(pageName)) {
            render(typePage, req, resp);
        } else if("fielddiff".equals(pageName)) {
            render(fieldPage, req, resp);
        } else if("objectdiff".equals(pageName)) {
            render(objectPage, req, resp);
        } else {
            return false;
        }

        return true;
    }

    public HollowDiff getDiff() {
        return diff;
    }

    public String getFromBlobName() {
        return fromBlobName;
    }

    public String getToBlobName() {
        return toBlobName;
    }

    public VelocityEngine getVelocity() {
        return velocity;
    }

    public String getBaseURLPath() {
        return baseURLPath;
    }

    public String getDiffUIPath() {
        return diffUIPath;
    }

    /** Registers a custom effigy factory for records of the given type. */
    public void addCustomHollowEffigyFactory(String typeName, CustomHollowEffigyFactory factory) {
        customHollowEffigyFactories.put(typeName, factory);
    }

    @Override
    public CustomHollowEffigyFactory getCustomHollowEffigyFactory(String typeName) {
        return customHollowEffigyFactories.get(typeName);
    }

    /** Adds a primary-key hint used to pair records of the hint's type during diffing. */
    public void addMatchHint(PrimaryKey matchHint) {
        this.matchHints.put(matchHint.getType(), matchHint);
    }

    @Override
    public Map<String, PrimaryKey> getMatchHints() {
        return matchHints;
    }

    @Override
    public ExactRecordMatcher getExactRecordMatcher() {
        return exactRecordMatcher;
    }

    public HollowObjectViewProvider getHollowObjectViewProvider() {
        return viewProvider;
    }

    public DiffViewOutputGenerator getDiffViewOutputGenerator() {
        return diffViewOutputGenerator;
    }

    /** Renders the given page into the response writer, with session state attached. */
    private void render(DiffPage page, HttpServletRequest req, HttpServletResponse resp) throws IOException {
        page.render(req, getSession(req, resp), resp.getWriter());
    }
}
| 9,451 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/HollowDiffUIRouter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.ui.HollowUIRouter;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Routes incoming HTTP requests to the {@link HollowDiffUI} registered under the request's
 * root path segment, falling back to the UI registered at the empty path when no exact
 * match exists.
 */
public class HollowDiffUIRouter extends HollowUIRouter {

    private final Map<String, HollowDiffUI> diffUIs;

    public HollowDiffUIRouter() {
        this("");
    }

    public HollowDiffUIRouter(String baseUrlPath) {
        super(baseUrlPath);
        this.diffUIs = new LinkedHashMap<>();
    }

    /**
     * Dispatches the request: static resources are served directly; everything else is
     * forwarded to the matching diff UI (or the default UI registered at "").
     *
     * @return true if the request was handled
     */
    public boolean handle(String target, HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        String rootPath = getTargetRootPath(target);

        if ("resource".equals(rootPath)) {
            return serveResource(req, resp, getResourceName(target, rootPath));
        }

        HollowDiffUI ui = diffUIs.get(rootPath);
        if (ui == null) {
            ui = diffUIs.get("");
            if (ui != null) {
                rootPath = ""; // a diff was added at path ""; route to it
            }
        }
        return ui != null && ui.serveRequest(getResourceName(target, rootPath), req, resp);
    }

    public Map<String, HollowDiffUI> getDiffUIs() {
        return diffUIs;
    }

    /** Creates and registers a new diff UI under the given path. */
    public HollowDiffUI addDiff(String diffPath, HollowDiff diff, String fromBlobName, String toBlobName) {
        HollowDiffUI diffUI = new HollowDiffUI(baseUrlPath, diffPath, diff, fromBlobName, toBlobName, velocityEngine);
        diffUIs.put(diffPath, diffUI);
        return diffUI;
    }

    /** Unregisters the diff UI at the given path, if present. */
    public void removeDiff(String diffPath) {
        diffUIs.remove(diffPath);
    }
}
| 9,452 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/HollowDiffUIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.tools.diff.HollowDiff;
/**
 * Entry point for hosting diff UIs over HTTP. Wraps a {@link DiffUIServer} and exposes a
 * small fluent lifecycle (start / join / stop).
 */
public class HollowDiffUIServer {

    private final DiffUIServer underlying;

    /** Serves on the default port, 8080. */
    public HollowDiffUIServer() {
        this(8080);
    }

    public HollowDiffUIServer(int port) {
        this.underlying = new DiffUIWebServer(new HollowDiffUIRouter(), port);
    }

    /** Registers a diff with the default "FROM"/"TO" blob labels. */
    public HollowDiffUI addDiff(String diffPath, HollowDiff diff) {
        return addDiff(diffPath, diff, "FROM", "TO");
    }

    public HollowDiffUI addDiff(String diffPath, HollowDiff diff, String fromBlobName, String toBlobName) {
        return underlying.addDiff(diffPath, diff, fromBlobName, toBlobName);
    }

    /** Starts the server; returns this for chaining. */
    public HollowDiffUIServer start() throws Exception {
        underlying.start();
        return this;
    }

    /** Blocks until the server terminates; returns this for chaining. */
    public HollowDiffUIServer join() throws InterruptedException {
        underlying.join();
        return this;
    }

    public void stop() throws Exception {
        underlying.stop();
    }
}
| 9,453 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/DiffUIWebServer.java
|
package com.netflix.hollow.diff.ui;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.ui.HollowUIWebServer;
import com.netflix.hollow.ui.HttpHandlerWithServletSupport;
class DiffUIWebServer extends HollowUIWebServer implements DiffUIServer {
private final HollowDiffUIRouter router;
public DiffUIWebServer(HollowDiffUIRouter router, int port) {
super(new HttpHandlerWithServletSupport(router), port);
this.router = router;
}
public HollowDiffUI addDiff(String diffPath, HollowDiff diff, String fromBlobName, String toBlobName) {
return this.router.addDiff(diffPath, diff, fromBlobName, toBlobName);
}
}
| 9,454 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/jetty/HollowDiffUIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.jetty;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.tools.diff.HollowDiff;
/**
* @deprecated use {@link com.netflix.hollow.diff.ui.HollowDiffUIServer}. This is deprecated because package name
* contains "jetty" but jetty-server dep is no longer required. Instead, this class lives on as an adapter
* over {@link com.netflix.hollow.diff.ui.HollowDiffUIServer}.
*/
@Deprecated
public class HollowDiffUIServer {

    // all behavior is forwarded to the non-jetty replacement class
    private final com.netflix.hollow.diff.ui.HollowDiffUIServer delegate;

    public HollowDiffUIServer() {
        delegate = new com.netflix.hollow.diff.ui.HollowDiffUIServer();
    }

    public HollowDiffUIServer(int port) {
        delegate = new com.netflix.hollow.diff.ui.HollowDiffUIServer(port);
    }

    public HollowDiffUI addDiff(String diffPath, HollowDiff diff) {
        return delegate.addDiff(diffPath, diff);
    }

    public HollowDiffUI addDiff(String diffPath, HollowDiff diff, String fromBlobName, String toBlobName) {
        return delegate.addDiff(diffPath, diff, fromBlobName, toBlobName);
    }

    /** Starts the underlying server; returns this adapter for chaining. */
    public HollowDiffUIServer start() throws Exception {
        delegate.start();
        return this;
    }

    /** Blocks until the underlying server terminates; returns this adapter for chaining. */
    public HollowDiffUIServer join() throws InterruptedException {
        delegate.join();
        return this;
    }

    public void stop() throws Exception {
        delegate.stop();
    }
}
| 9,455 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowDiffUIBreadcrumbs.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;
/**
 * An immutable breadcrumb entry (href + label) rendered in the diff UI navigation trail.
 */
public class HollowDiffUIBreadcrumbs {

    private final String href;
    private final String label;

    public HollowDiffUIBreadcrumbs(String link, String displayText) {
        this.href = link;
        this.label = displayText;
    }

    /** @return the URL this breadcrumb points at */
    public String getLink() {
        return href;
    }

    /** @return the text shown for this breadcrumb */
    public String getDisplayText() {
        return label;
    }
}
| 9,456 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowHeaderEntry.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;

import java.util.Objects;
/**
 * One header-tag row in the diff UI's header table, holding the tag's value in both the
 * "from" and "to" states. Immutable.
 */
public class HollowHeaderEntry {

    private final int idx;
    private final String key;
    private final String fromValue;
    private final String toValue;

    /**
     * @param idx       row index of this entry in the rendered table
     * @param key       header tag name
     * @param fromValue tag value in the "from" state (may be null)
     * @param toValue   tag value in the "to" state (may be null)
     */
    public HollowHeaderEntry(int idx, String key, String fromValue, String toValue) {
        this.idx = idx;
        this.key = key;
        this.fromValue = fromValue;
        this.toValue = toValue;
    }

    public int getIdx() {
        return idx;
    }

    /** @return highlight color for the row: empty when unchanged, orange when the values differ */
    public String getBgColor() {
        if(isSame())
            return "";
        return "#FFCC99";
    }

    /** @return true if the from/to values are equal (two nulls count as equal) */
    public boolean isSame() {
        // null-safe equality; replaces the hand-rolled `fromValue==null ? ... : ...` ternary
        return Objects.equals(fromValue, toValue);
    }

    public String getKey() {
        return key;
    }

    /** @return the "from" value, rendered as the literal string "null" when absent */
    public String getFromValue() {
        return fromValue == null ? "null" : fromValue;
    }

    /** @return the "to" value, rendered as the literal string "null" when absent */
    public String getToValue() {
        return toValue == null ? "null" : toValue;
    }
}
| 9,457 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowDiffOverviewTypeEntry.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;
import static com.netflix.hollow.ui.HollowDiffUtil.formatBytes;
public class HollowDiffOverviewTypeEntry {
private final String typeName;
private final boolean hasUniqueKey;
private final long totalDiffScore;
private final int unmatchedInFrom;
private final int unmatchedInTo;
private final int totalInFrom;
private final int totalInTo;
private final long heapInFrom;
private final long heapInTo;
private final long holeInFrom;
private final long holeInTo;
public HollowDiffOverviewTypeEntry(String typeName, long totalDiffScore, int unmatchedInFrom, int unmatchedInTo, int totalInFrom, int totalInTo) {
this(typeName, false, totalDiffScore, unmatchedInFrom, unmatchedInTo, totalInFrom, totalInTo, 0, 0, 0, 0);
}
public HollowDiffOverviewTypeEntry(String typeName, long totalDiffScore, int unmatchedInFrom, int unmatchedInTo, int totalInFrom, int totalInTo,
long heapInFrom, long heapInTo, long holeInFrom, long holeInTo) {
this(typeName, false, totalDiffScore, unmatchedInFrom, unmatchedInTo, totalInFrom, totalInTo, heapInFrom, heapInTo, holeInFrom, holeInTo);
}
public HollowDiffOverviewTypeEntry(String typeName, boolean hasUniqueKey, long totalDiffScore, int unmatchedInFrom, int unmatchedInTo, int totalInFrom, int totalInTo,
long heapInFrom, long heapInTo, long holeInFrom, long holeInTo) {
this.typeName = typeName;
this.hasUniqueKey = hasUniqueKey;
this.totalDiffScore = totalDiffScore;
this.unmatchedInFrom = unmatchedInFrom;
this.unmatchedInTo = unmatchedInTo;
this.totalInFrom = totalInFrom;
this.totalInTo = totalInTo;
this.heapInFrom = heapInFrom;
this.heapInTo = heapInTo;
this.holeInFrom = holeInFrom;
this.holeInTo = holeInTo;
}
    public String getTypeName() {
        return typeName;
    }
    /** True only when a unique-key match path is configured for this type. */
    public boolean hasUniqueKey() { return hasUniqueKey;}
    /** True when either state contains records of this type absent from the other. */
    public boolean hasUnmatched() { return unmatchedInFrom > 0 || unmatchedInTo > 0; }
    /** True when at least one of the two states contains records of this type. */
    public boolean hasData() { return totalInFrom!=0 || totalInTo!=0; }
    public long getTotalDiffScore() {
        return totalDiffScore;
    }
    public int getUnmatchedInFrom() {
        return unmatchedInFrom;
    }
    public int getUnmatchedInTo() {
        return unmatchedInTo;
    }
    public int getTotalInFrom() {
        return totalInFrom;
    }
    public int getTotalInTo() {
        return totalInTo;
    }
    /** Absolute change in record count between the two states. */
    public int getDeltaSize() { return Math.abs(totalInFrom - totalInTo); }
    // Raw byte counts; "hole" refers to the approximate ordinal-hole cost of the type state.
    public long getHeapInFrom() { return heapInFrom; }
    public long getHeapInTo() { return heapInTo; }
    public long getHoleInFrom() { return holeInFrom; }
    public long getHoleInTo() { return holeInTo; }
    // Human-readable variants used directly by the overview template.
    public String getHeapInFromFormatted() { return formatBytes(heapInFrom); }
    public String getHeapInToFormatted() { return formatBytes(heapInTo); }
    public String getHoleInFromFormatted() { return formatBytes(holeInFrom); }
    public String getHoleInToFormatted() { return formatBytes(holeInTo); }
    /**
     * Row background color for the overview table: gray when neither state has data,
     * yellow shades when no unique key is configured (darker when counts differ),
     * orange when a diff or unmatched records exist, and empty string (default) otherwise.
     */
    public String getBgColor() {
        if (totalInFrom == 0 && totalInTo == 0)
            return "#D0D0D0"; // No Data
        else if (!hasUniqueKey) {
            if (totalInFrom!=totalInTo) return "#F0E592";
            return "#FFFBDB"; // No Unique Key
        } else if (totalDiffScore > 0 || unmatchedInFrom > 0 || unmatchedInTo > 0)
            return "#FFCC99"; // Has Diff
        return "";
    }
}
| 9,458 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowUnmatchedObject.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;
/**
 * UI model for a record that exists in only one of the two compared states:
 * carries the human-readable key plus the record's ordinal within that state.
 * Instances are immutable.
 */
public class HollowUnmatchedObject {

    private final String displayKey;
    private final int ordinal;

    public HollowUnmatchedObject(String displayKey, int ordinal) {
        this.displayKey = displayKey;
        this.ordinal = ordinal;
    }

    /** Human-readable key used to render this record in the UI. */
    public String getDisplayKey() {
        return displayKey;
    }

    /** Ordinal of the record within the state that contains it. */
    public int getOrdinal() {
        return ordinal;
    }
}
| 9,459 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowObjectPairDiffScore.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;
/**
 * UI model pairing a matched (from, to) record pair with its aggregate diff score.
 * Natural ordering is descending by diff score so the most-different pairs list first.
 * Not thread-safe: {@link #incrementDiffScore(int)} mutates the score without synchronization.
 */
public class HollowObjectPairDiffScore implements Comparable<HollowObjectPairDiffScore> {

    private final String displayKey;
    private final int fromOrdinal;
    private final int toOrdinal;
    private int diffScore;

    /** Creates a pair with an initial score of zero, to be accumulated via {@link #incrementDiffScore(int)}. */
    public HollowObjectPairDiffScore(String displayKey, int fromOrdinal, int toOrdinal) {
        this(displayKey, fromOrdinal, toOrdinal, 0);
    }

    /**
     * @param displayKey  human-readable key identifying the record pair in the UI
     * @param fromOrdinal record ordinal in the "from" state
     * @param toOrdinal   record ordinal in the "to" state
     * @param score       initial diff score
     */
    public HollowObjectPairDiffScore(String displayKey, int fromOrdinal, int toOrdinal, int score) {
        this.displayKey = displayKey;
        this.fromOrdinal = fromOrdinal;
        this.toOrdinal = toOrdinal;
        this.diffScore = score;
    }

    public String getDisplayKey() {
        return displayKey;
    }

    public int getFromOrdinal() {
        return fromOrdinal;
    }

    public int getToOrdinal() {
        return toOrdinal;
    }

    public int getDiffScore() {
        return diffScore;
    }

    /** Accumulates field-level diff scores into this pair's total. */
    public void incrementDiffScore(int incrementBy) {
        diffScore += incrementBy;
    }

    /**
     * Descending by diff score.  BUGFIX: previously implemented as
     * {@code o.getDiffScore() - diffScore}, which can overflow and invert the
     * ordering for extreme score values; {@link Integer#compare(int, int)} is safe.
     */
    @Override
    public int compareTo(HollowObjectPairDiffScore o) {
        return Integer.compare(o.getDiffScore(), diffScore);
    }
}
| 9,460 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/model/HollowFieldDiffScore.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.model;
public class HollowFieldDiffScore implements Comparable<HollowFieldDiffScore> {
private final String typeName;
private final int typeFieldIndex;
private final String displayName;
private final int numDiffObjects;
private final int numTotalObjectPairs;
private final long diffScore;
public HollowFieldDiffScore(String typeName, int typeFieldIndex, String displayName, int numDiffObjects, int numTotalObjectPairs, long diffScore) {
this.typeName = typeName;
this.typeFieldIndex = typeFieldIndex;
this.displayName = displayName;
this.numDiffObjects = numDiffObjects;
this.numTotalObjectPairs = numTotalObjectPairs;
this.diffScore = diffScore;
}
public String getTypeName() {
return typeName;
}
public int getTypeFieldIndex() {
return typeFieldIndex;
}
public String getDisplayName() {
return displayName;
}
public int getNumDiffObjects() {
return numDiffObjects;
}
public int getNumTotalObjectPairs() {
return numTotalObjectPairs;
}
public long getDiffScore() {
return diffScore;
}
@Override
public int compareTo(HollowFieldDiffScore o) {
return o.getNumDiffObjects() - numDiffObjects;
}
}
| 9,461 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/pages/DiffPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.pages;
import static com.netflix.hollow.ui.HollowDiffUtil.formatBytes;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.diff.ui.model.HollowHeaderEntry;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.ui.HollowUISession;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
/**
 * Base class for all diff UI pages.  Implements the shared Velocity render pipeline
 * (header template, page-specific body template, footer template) and populates the
 * context values common to every page: blob names, overall heap movement, header
 * tags, and cookie-driven display settings.
 */
public abstract class DiffPage {
    protected final HollowDiffUI diffUI;
    protected final Template template;        // page-specific body template
    protected final Template headerTemplate;  // shared diff-header.vm
    protected final Template footerTemplate;  // shared diff-footer.vm
    // Refreshed per-request from cookies in processCookies().
    protected String env = "";
    protected boolean isHeaderEnabled = false;
    /**
     * @param templateName name of the Velocity template rendering this page's body
     */
    public DiffPage(HollowDiffUI diffUI, String templateName) {
        this.diffUI = diffUI;
        this.template = diffUI.getVelocity().getTemplate(templateName);
        this.headerTemplate = diffUI.getVelocity().getTemplate("diff-header.vm");
        this.footerTemplate = diffUI.getVelocity().getTemplate("diff-footer.vm");
    }
    /**
     * Renders header, body, then footer into {@code writer}.  Subclasses contribute
     * page-specific context via {@link #setUpContext} before the templates are merged.
     */
    public void render(HttpServletRequest req, HollowUISession session, Writer writer) {
        processCookies(req);
        VelocityContext ctx = new VelocityContext();
        ctx.put("request", req);
        ctx.put("env", env);
        ctx.put("isHeaderEnabled", isHeaderEnabled);
        ctx.put("basePath", diffUI.getBaseURLPath());
        ctx.put("path", diffUI.getDiffUIPath());
        ctx.put("fromBlobName", diffUI.getFromBlobName());
        ctx.put("toBlobName", diffUI.getToBlobName());
        // Summarize overall heap movement between the two states for the page header.
        HollowDiff diff = diffUI.getDiff();
        long heapFrom = diff.getFromStateEngine().calcApproxDataSize();
        long heapTo = diff.getToStateEngine().calcApproxDataSize();
        long heapDiff = heapTo-heapFrom;
        ctx.put("fromHeap", formatBytes(heapFrom));
        ctx.put("toHeap", formatBytes(heapTo));
        // Explicit "+" prefix for growth; formatBytes renders the magnitude.
        ctx.put("diffHeap", (heapDiff > 0 ? "+" : "") + formatBytes(heapDiff));
        ctx.put("diffHeap_cssClass", heapDiff<=0 ? "heap_dec" : "heap_inc");
        setUpContext(req, session, ctx);
        ctx.put("headerEntries", getHeaderEntries());
        // Merge order matters: header, then page body, then footer.
        headerTemplate.merge(ctx, writer);
        template.merge(ctx, writer);
        footerTemplate.merge(ctx, writer);
    }
    // Reads display settings ("env", "isHeaderEnabled") from request cookies, if present.
    private void processCookies(HttpServletRequest request) {
        Cookie[] cookies = request.getCookies();
        if (cookies != null) {
            for (Cookie cookie : cookies) {
                String name = cookie.getName();
                String value = cookie.getValue();
                if ("env".equals(name)) {
                    env = value;
                } else if ("isHeaderEnabled".equals(name)) {
                    isHeaderEnabled = Boolean.valueOf(value);
                }
            }
        }
    }
    /** Hook for subclasses to add page-specific values to the Velocity context. */
    protected abstract void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx);
    protected HollowDiff getDiff() {
        return diffUI.getDiff();
    }
    /** Integer variant of {@link #param}; same session-sticky semantics. */
    protected int intParam(HttpServletRequest req, HollowUISession session, String ctx, String paramName, int defaultValue) {
        return Integer.parseInt(param(req, session, ctx, paramName, String.valueOf(defaultValue)));
    }
    /** Boolean variant of {@link #param}; same session-sticky semantics. */
    protected boolean boolParam(HttpServletRequest req, HollowUISession session, String ctx, String paramName, boolean defaultValue) {
        return Boolean.parseBoolean(param(req, session, ctx, paramName, String.valueOf(defaultValue)));
    }
    /**
     * Session-sticky request parameter: an explicit request value is remembered in the
     * session under "{ctx}_{paramName}" and reused on later requests that omit it;
     * falls back to {@code defaultValue} when neither request nor session has a value.
     */
    protected String param(HttpServletRequest req, HollowUISession session, String ctx, String paramName, String defaultValue) {
        String sessionParamName = ctx + "_" + paramName;
        String reqParam = req.getParameter(paramName);
        if(reqParam != null) {
            session.setAttribute(sessionParamName, reqParam);
            return reqParam;
        }
        String sessionParam = (String) session.getAttribute(sessionParamName);
        if(sessionParam != null)
            return sessionParam;
        return defaultValue;
    }
    /** Linear scan for the named type's diff; returns null when the type is not present. */
    protected HollowTypeDiff getTypeDiff(String typeName) {
        for(HollowTypeDiff typeDiff : getDiff().getTypeDiffs()) {
            if(typeDiff.getTypeName().equals(typeName))
                return typeDiff;
        }
        return null;
    }
    // Union of header tag keys from both states; a missing side renders as null in the UI.
    private List<HollowHeaderEntry> getHeaderEntries() {
        Map<String, String> fromTags = diffUI.getDiff().getFromStateEngine().getHeaderTags();
        Map<String, String> toTags = diffUI.getDiff().getToStateEngine().getHeaderTags();
        Set<String> allKeys = new HashSet<String>();
        allKeys.addAll(fromTags.keySet());
        allKeys.addAll(toTags.keySet());
        List<HollowHeaderEntry> entries = new ArrayList<HollowHeaderEntry>();
        int i=0;
        for(String key : allKeys) {
            entries.add(new HollowHeaderEntry(i++, key, fromTags.get(key), toTags.get(key)));
        }
        return entries;
    }
}
| 9,462 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/pages/DiffObjectPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.pages;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.diff.ui.model.HollowDiffUIBreadcrumbs;
import com.netflix.hollow.diffview.HollowDiffHtmlKickstarter;
import com.netflix.hollow.diffview.HollowObjectView;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Page rendering the side-by-side diff view for a single matched (or unmatched)
 * object pair, identified by type name plus from/to ordinals.
 */
public class DiffObjectPage extends DiffPage {

    public DiffObjectPage(HollowDiffUI diffUI) {
        super(diffUI, "diff-object.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        String type = req.getParameter("type");
        int fromOrdinal = Integer.parseInt(req.getParameter("fromOrdinal"));
        int toOrdinal = Integer.parseInt(req.getParameter("toOrdinal"));

        // fieldIdx is optional: present only when the user navigated here from a field-level diff page.
        int fieldIdx = -1;
        if(req.getParameter("fieldIdx") != null)
            fieldIdx = Integer.parseInt(req.getParameter("fieldIdx"));

        ctx.put("typeName", type);
        ctx.put("fromOrdinal", fromOrdinal);
        ctx.put("toOrdinal", toOrdinal);

        HollowObjectView diffView = diffUI.getHollowObjectViewProvider().getObjectView(req, session);
        HollowDiffHtmlKickstarter htmlKickstarter = new HollowDiffHtmlKickstarter(diffUI.getBaseURLPath());
        ctx.put("initialHtml", htmlKickstarter.initialHtmlRows(diffView));
        ctx.put("breadcrumbs", getBreadcrumbs(type, fieldIdx, fromOrdinal, toOrdinal));
    }

    /**
     * Prefixes {@code relative} with the configured diff UI path.  When no path is
     * configured, the root link falls back to "/" and other links stay relative.
     * (Replaces three copies of the same inline conditional.)
     */
    private String uiLink(String relative) {
        String uiPath = diffUI.getDiffUIPath();
        if(uiPath == null || uiPath.length() == 0)
            return relative.isEmpty() ? "/" : relative;
        return relative.isEmpty() ? uiPath : uiPath + "/" + relative;
    }

    /**
     * Builds the breadcrumb trail: Overview → type → (optional field) → current object.
     */
    private List<HollowDiffUIBreadcrumbs> getBreadcrumbs(String type, int fieldIdx, int fromOrdinal, int toOrdinal) {
        HollowTypeDiff typeDiff = getTypeDiff(type);

        List<HollowDiffUIBreadcrumbs> breadcrumbs = new ArrayList<HollowDiffUIBreadcrumbs>();
        breadcrumbs.add(new HollowDiffUIBreadcrumbs(uiLink(""), "Overview"));
        breadcrumbs.add(new HollowDiffUIBreadcrumbs(uiLink("typediff?type=" + type), type));

        if(fieldIdx != -1) {
            breadcrumbs.add(new HollowDiffUIBreadcrumbs(
                    uiLink("fielddiff?type=" + type + "&fieldIdx=" + fieldIdx),
                    typeDiff.getFieldDiffs().get(fieldIdx).getFieldIdentifier().toString()));
        }

        // A record present only in the "to" state arrives with fromOrdinal == -1;
        // derive the display key from whichever side actually contains the record.
        String displayKey =
                fromOrdinal != -1 ?
                    typeDiff.getMatcher().getKeyDisplayString(typeDiff.getFromTypeState(), fromOrdinal)
                  : typeDiff.getMatcher().getKeyDisplayString(typeDiff.getToTypeState(), toOrdinal);

        breadcrumbs.add(new HollowDiffUIBreadcrumbs(null, displayKey));
        return breadcrumbs;
    }
}
| 9,463 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/pages/DiffOverviewPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.pages;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.diff.ui.model.HollowDiffOverviewTypeEntry;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Overview landing page: one row per type diff, sortable by diff score, unmatched
 * counts, record counts, or heap/hole byte sizes via the "sortBy" request parameter.
 */
public class DiffOverviewPage extends DiffPage {

    public DiffOverviewPage(HollowDiffUI diffUI) {
        super(diffUI, "diff-overview.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        // Sort selection is sticky across requests via the session (see DiffPage.param).
        String sortBy = param(req, session, "overview", "sortBy", "diffs");
        ctx.put("typeOverviewEntries", getTypeEntries(sortBy));
    }

    /** Builds one overview row per type diff, then orders the rows according to {@code sortBy}. */
    private List<HollowDiffOverviewTypeEntry> getTypeEntries(String sortBy) {
        List<HollowDiffOverviewTypeEntry> overviewEntries = new ArrayList<>();
        for(HollowTypeDiff diff : getDiff().getTypeDiffs()) {
            long totalDiffScore = diff.getTotalDiffScore();
            int unmatchedInFrom = diff.getUnmatchedOrdinalsInFrom().size();
            int unmatchedInTo = diff.getUnmatchedOrdinalsInTo().size();

            // Count retrieval is best-effort: a failure for one type must not take down
            // the whole overview page, so log it and fall back to zero.
            int fromCount = 0;
            try {
                fromCount = diff.getTotalItemsInFromState();
            } catch (Exception ex) {
                System.out.println("DIFF_ERROR: Unable to getTotalItemsInFromState for type=" + diff.getTypeName());
                ex.printStackTrace();
            }
            int toCount = 0;
            try {
                toCount = diff.getTotalItemsInToState();
            } catch (Exception ex) {
                System.out.println("DIFF_ERROR: Unable to getTotalItemsInToState for type=" + diff.getTypeName());
                ex.printStackTrace();
            }

            // Either side may lack the type entirely; report zero bytes in that case.
            HollowTypeReadState fromTypeState = diff.getFromTypeState();
            HollowTypeReadState toTypeState = diff.getToTypeState();
            overviewEntries.add(new HollowDiffOverviewTypeEntry(diff.getTypeName(), diff.hasMatchPaths(), totalDiffScore, unmatchedInFrom, unmatchedInTo, fromCount, toCount,
                    fromTypeState==null ? 0:fromTypeState.getApproximateHeapFootprintInBytes(), toTypeState==null ? 0:toTypeState.getApproximateHeapFootprintInBytes(),
                    fromTypeState==null ? 0:fromTypeState.getApproximateHoleCostInBytes(), toTypeState==null ? 0:toTypeState.getApproximateHoleCostInBytes()));
        }
        sortEntries(overviewEntries, sortBy);
        return overviewEntries;
    }

    /** Sorts rows in place, descending by the column selected via {@code sortBy}. */
    private void sortEntries(List<HollowDiffOverviewTypeEntry> overviewEntries, String sortBy) {
        if(sortBy == null || "diffs".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> {
                int result = Comparator
                        .comparing(HollowDiffOverviewTypeEntry::getTotalDiffScore)
                        .thenComparing(HollowDiffOverviewTypeEntry::getDeltaSize)
                        .thenComparing(HollowDiffOverviewTypeEntry::hasData)
                        .thenComparing(HollowDiffOverviewTypeEntry::hasUnmatched)
                        .thenComparing(HollowDiffOverviewTypeEntry::hasUniqueKey)
                        .compare(o2, o1);
                // Fallback to type-name ordering for fully-tied rows.
                if (result==0) {
                    return o1.getTypeName().compareTo(o2.getTypeName());
                }
                return result;
            });
        } else if("unmatchedFrom".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Integer.compare(o2.getUnmatchedInFrom(), o1.getUnmatchedInFrom()));
        } else if("unmatchedTo".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Integer.compare(o2.getUnmatchedInTo(), o1.getUnmatchedInTo()));
        } else if("fromCount".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Integer.compare(o2.getTotalInFrom(), o1.getTotalInFrom()));
        } else if("toCount".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Integer.compare(o2.getTotalInTo(), o1.getTotalInTo()));
        } else if("fromHeap".equals(sortBy)) {
            // BUGFIX: heap/hole comparisons previously used (int)(long - long), which
            // truncates byte counts beyond ~2GB and can invert or destabilize the ordering.
            overviewEntries.sort((o1, o2) -> Long.compare(o2.getHeapInFrom(), o1.getHeapInFrom()));
        } else if("toHeap".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Long.compare(o2.getHeapInTo(), o1.getHeapInTo()));
        } else if("fromHole".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Long.compare(o2.getHoleInFrom(), o1.getHoleInFrom()));
        } else if("toHole".equals(sortBy)) {
            overviewEntries.sort((o1, o2) -> Long.compare(o2.getHoleInTo(), o1.getHoleInTo()));
        }
    }
}
| 9,464 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/pages/DiffFieldPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.pages;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.diff.ui.model.HollowDiffUIBreadcrumbs;
import com.netflix.hollow.diff.ui.model.HollowObjectPairDiffScore;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.count.HollowFieldDiff;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Page showing, for one field of one type, the matched object pairs ranked by
 * that field's diff score, paginated via session-sticky request parameters.
 */
public class DiffFieldPage extends DiffPage {

    public DiffFieldPage(HollowDiffUI diffUI) {
        super(diffUI, "diff-field.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        String typeName = req.getParameter("type");
        HollowTypeDiff typeDiff = getTypeDiff(typeName);
        int fieldIdx = Integer.parseInt(req.getParameter("fieldIdx"));
        HollowFieldDiff fieldDiff = typeDiff.getFieldDiffs().get(fieldIdx);

        // Pagination state is remembered per (type, field) via the session.
        String sessionPageCtx = typeName + ":" + fieldIdx;
        int beginIdx = intParam(req, session, sessionPageCtx, "diffPairBeginIdx", 0);
        int pageSize = intParam(req, session, sessionPageCtx, "diffPairPageSize", 25);

        ctx.put("objectScorePairs", getObjectDiffScores(typeDiff, fieldDiff, beginIdx, pageSize));
        ctx.put("typeDiff", typeDiff);
        ctx.put("fieldDiff", fieldDiff);
        ctx.put("fieldIdx", fieldIdx);

        // Prev/next links are only emitted when the corresponding page exists.
        if(beginIdx > 0)
            ctx.put("previousDiffPairPageBeginIdx", beginIdx - pageSize);
        if((beginIdx + pageSize) < fieldDiff.getNumDiffs())
            ctx.put("nextDiffPairPageBeginIdx", beginIdx + pageSize);

        ctx.put("breadcrumbs", getBreadcrumbs(typeDiff, fieldDiff));
    }

    /**
     * Scores every differing pair for this field, sorts descending by score, and
     * returns the requested page as a sublist.
     */
    private List<HollowObjectPairDiffScore> getObjectDiffScores(HollowTypeDiff typeDiff, HollowFieldDiff fieldDiff, int beginRecord, int pageSize) {
        List<HollowObjectPairDiffScore> scored = new ArrayList<HollowObjectPairDiffScore>();
        int numDiffs = fieldDiff.getNumDiffs();
        for(int idx = 0; idx < numDiffs; idx++) {
            int fromOrd = fieldDiff.getFromOrdinal(idx);
            String key = typeDiff.getMatcher().getKeyDisplayString(typeDiff.getFromTypeState(), fromOrd);
            scored.add(new HollowObjectPairDiffScore(key, fromOrd, fieldDiff.getToOrdinal(idx), fieldDiff.getPairScore(idx)));
        }
        Collections.sort(scored);
        int endRecord = Math.min(beginRecord + pageSize, scored.size());
        return scored.subList(beginRecord, endRecord);
    }

    /** Breadcrumb trail: Overview → type → current field. */
    private List<HollowDiffUIBreadcrumbs> getBreadcrumbs(HollowTypeDiff typeDiff, HollowFieldDiff fieldDiff) {
        String uiPath = diffUI.getDiffUIPath();
        boolean noPath = (uiPath == null || uiPath.length() == 0);

        List<HollowDiffUIBreadcrumbs> trail = new ArrayList<HollowDiffUIBreadcrumbs>();
        trail.add(new HollowDiffUIBreadcrumbs(noPath ? "/" : uiPath, "Overview"));
        String typeLink = "typediff?type=" + typeDiff.getTypeName();
        trail.add(new HollowDiffUIBreadcrumbs(noPath ? typeLink : uiPath + "/" + typeLink, typeDiff.getTypeName()));
        trail.add(new HollowDiffUIBreadcrumbs(null, fieldDiff.getFieldIdentifier().toString()));
        return trail;
    }
}
| 9,465 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diff/ui/pages/DiffTypePage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diff.ui.pages;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.diff.ui.model.HollowDiffUIBreadcrumbs;
import com.netflix.hollow.diff.ui.model.HollowFieldDiffScore;
import com.netflix.hollow.diff.ui.model.HollowObjectPairDiffScore;
import com.netflix.hollow.diff.ui.model.HollowUnmatchedObject;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.count.HollowFieldDiff;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Page summarizing a single type's diff: per-field scores, the matched object
 * pairs ranked by aggregate diff score, and records unmatched on either side.
 * Expensive per-type lists are computed lazily and cached across requests.
 */
public class DiffTypePage extends DiffPage {

    // Keyed by type name; ConcurrentHashMap tolerates concurrent page renders.
    private final ConcurrentHashMap<String, List<HollowObjectPairDiffScore>> typeObjectPairScores;
    private final ConcurrentHashMap<String, List<HollowUnmatchedObject>> unmatchedFromObjects;
    private final ConcurrentHashMap<String, List<HollowUnmatchedObject>> unmatchedToObjects;

    public DiffTypePage(HollowDiffUI diffUI) {
        super(diffUI, "diff-type.vm");
        this.typeObjectPairScores = new ConcurrentHashMap<String, List<HollowObjectPairDiffScore>>();
        this.unmatchedFromObjects = new ConcurrentHashMap<String, List<HollowUnmatchedObject>>();
        this.unmatchedToObjects = new ConcurrentHashMap<String, List<HollowUnmatchedObject>>();
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        String typeName = req.getParameter("type");
        HollowTypeDiff typeDiff = getTypeDiff(typeName);

        // Three independently-paginated sections, with session-sticky positions keyed by type.
        int diffPairBeginIdx = intParam(req, session, typeName, "diffPairBeginIdx", 0);
        int diffPairPageSize = intParam(req, session, typeName, "diffPairPageSize", 25);
        int unmatchedFromBeginIdx = intParam(req, session, typeName, "unmatchedFromBeginIdx", 0);
        int unmatchedToBeginIdx = intParam(req, session, typeName, "unmatchedToBeginIdx", 0);
        int unmatchedPageSize = intParam(req, session, typeName, "unmatchedPageSize", 25);
        boolean showFields = boolParam(req, session, typeName, "showFields", true);

        List<HollowObjectPairDiffScore> pairs = lazyGetDiffScorePairs(typeDiff);
        List<HollowUnmatchedObject> unmatchedFrom = lazyGetUnmatchedFromObjects(typeDiff);
        List<HollowUnmatchedObject> unmatchedTo = lazyGetUnmatchedToObjects(typeDiff);

        ctx.put("objectScorePairs", sublist(pairs, diffPairBeginIdx, diffPairPageSize));
        ctx.put("unmatchedFromObjects", sublist(unmatchedFrom, unmatchedFromBeginIdx, unmatchedPageSize));
        ctx.put("unmatchedToObjects", sublist(unmatchedTo, unmatchedToBeginIdx, unmatchedPageSize));
        ctx.put("typeDiff", typeDiff);
        ctx.put("fieldDiffs", getDisplayDiffs(typeDiff));
        ctx.put("numObjectsDiff", pairs.size());

        // Prev/next links only when the corresponding page exists.
        if(diffPairBeginIdx > 0)
            ctx.put("previousDiffPairPageBeginIdx", diffPairBeginIdx - diffPairPageSize);
        if((diffPairBeginIdx + diffPairPageSize) < pairs.size())
            ctx.put("nextDiffPairPageBeginIdx", diffPairBeginIdx + diffPairPageSize);
        if(unmatchedFromBeginIdx > 0)
            ctx.put("previousUnmatchedFromPageBeginIdx", unmatchedFromBeginIdx - unmatchedPageSize);
        if((unmatchedFromBeginIdx + unmatchedPageSize) < unmatchedFrom.size())
            ctx.put("nextUnmatchedFromPageBeginIdx", unmatchedFromBeginIdx + unmatchedPageSize);
        if(unmatchedToBeginIdx > 0)
            ctx.put("previousUnmatchedToPageBeginIdx", unmatchedToBeginIdx - unmatchedPageSize);
        if((unmatchedToBeginIdx + unmatchedPageSize) < unmatchedTo.size())
            ctx.put("nextUnmatchedToPageBeginIdx", unmatchedToBeginIdx + unmatchedPageSize);

        ctx.put("showFields", showFields);
        ctx.put("breadcrumbs", getBreadcrumbs(typeDiff));
    }

    /** Clamped pagination view; an out-of-range fromIndex resets to the first page. */
    private <T> List<T> sublist(List<T> list, int fromIndex, int pageSize) {
        if(fromIndex >= list.size())
            fromIndex = 0;
        if(fromIndex + pageSize >= list.size())
            pageSize = list.size() - fromIndex;
        return list.subList(fromIndex, fromIndex + pageSize);
    }

    /** One display row per field diff, sorted descending by number of differing objects. */
    private List<HollowFieldDiffScore> getDisplayDiffs(HollowTypeDiff typeDiff) {
        List<HollowFieldDiff> fieldDiffs = typeDiff.getFieldDiffs();
        List<HollowFieldDiffScore> displayDiffs = new ArrayList<HollowFieldDiffScore>();
        for(int i=0;i<fieldDiffs.size();i++) {
            HollowFieldDiff fieldDiff = fieldDiffs.get(i);
            displayDiffs.add(
                    new HollowFieldDiffScore(
                            typeDiff.getTypeName(),
                            i,
                            fieldDiff.getFieldIdentifier().toString(),
                            fieldDiff.getNumDiffs(),
                            typeDiff.getTotalNumberOfMatches(),
                            fieldDiff.getTotalDiffScore())
            );
        }
        Collections.sort(displayDiffs);
        return displayDiffs;
    }

    /** Returns the cached per-pair score list, computing it on first access. */
    private List<HollowObjectPairDiffScore> lazyGetDiffScorePairs(HollowTypeDiff typeDiff) {
        List<HollowObjectPairDiffScore> scores = typeObjectPairScores.get(typeDiff.getTypeName());
        if(scores != null) {
            return scores;
        }
        scores = aggregateFieldDiffScores(typeDiff);
        // putIfAbsent: keep whichever list won a concurrent race so all renders agree.
        List<HollowObjectPairDiffScore> existingScores = typeObjectPairScores.putIfAbsent(typeDiff.getTypeName(), scores);
        return existingScores != null ? existingScores : scores;
    }

    /**
     * Sums each matched pair's field-level scores into a single per-pair score,
     * indexed by "from" ordinal, then returns the pairs sorted descending by score.
     */
    private List<HollowObjectPairDiffScore> aggregateFieldDiffScores(HollowTypeDiff typeDiff) {
        // Handle from State missing Type
        if (typeDiff.getFromTypeState()==null) return Collections.emptyList();

        int maxFromOrdinal = typeDiff.getFromTypeState().maxOrdinal();
        HollowObjectPairDiffScore[] allDiffPairsIndexedByFromOrdinal = new HollowObjectPairDiffScore[maxFromOrdinal + 1];
        int diffPairCounts = 0;

        for(HollowFieldDiff fieldDiff : typeDiff.getFieldDiffs()) {
            for(int i=0;i<fieldDiff.getNumDiffs();i++) {
                int fromOrdinal = fieldDiff.getFromOrdinal(i);
                if(allDiffPairsIndexedByFromOrdinal[fromOrdinal] == null) {
                    String displayKey = typeDiff.getMatcher().getKeyDisplayString(typeDiff.getFromTypeState(), fromOrdinal);
                    allDiffPairsIndexedByFromOrdinal[fromOrdinal] = new HollowObjectPairDiffScore(displayKey, fromOrdinal, fieldDiff.getToOrdinal(i));
                    diffPairCounts++;
                }
                allDiffPairsIndexedByFromOrdinal[fromOrdinal].incrementDiffScore(fieldDiff.getPairScore(i));
            }
        }

        List<HollowObjectPairDiffScore> scores = new ArrayList<HollowObjectPairDiffScore>(diffPairCounts);
        for(HollowObjectPairDiffScore score : allDiffPairsIndexedByFromOrdinal) {
            if(score != null)
                scores.add(score);
        }
        Collections.sort(scores);
        return scores;
    }

    private List<HollowUnmatchedObject> lazyGetUnmatchedFromObjects(HollowTypeDiff typeDiff) {
        return lazyGetUnmatchedObjects(unmatchedFromObjects, typeDiff, typeDiff.getFromTypeState(), typeDiff.getUnmatchedOrdinalsInFrom());
    }

    private List<HollowUnmatchedObject> lazyGetUnmatchedToObjects(HollowTypeDiff typeDiff) {
        return lazyGetUnmatchedObjects(unmatchedToObjects, typeDiff, typeDiff.getToTypeState(), typeDiff.getUnmatchedOrdinalsInTo());
    }

    /** Returns the cached unmatched-record list for the given side, computing it on first access. */
    private List<HollowUnmatchedObject> lazyGetUnmatchedObjects(
            ConcurrentHashMap<String, List<HollowUnmatchedObject>> cache,
            HollowTypeDiff typeDiff,
            HollowObjectTypeReadState typeState,
            IntList unmatchedOrdinals) {
        // Handle typeState missing from either from or to
        if (typeState==null) return Collections.emptyList();

        String cacheKey = typeDiff.getTypeName();
        List<HollowUnmatchedObject> list = cache.get(cacheKey);
        if(list != null)
            return list;

        list = new ArrayList<HollowUnmatchedObject>();
        for(int i=0;i<unmatchedOrdinals.size();i++) {
            int ordinal = unmatchedOrdinals.get(i);
            String keyDisplay = typeDiff.getMatcher().getKeyDisplayString(typeState, ordinal);
            list.add(new HollowUnmatchedObject(keyDisplay, ordinal));
        }

        // BUGFIX: entries were previously stored under typeState.getSchema().getName()
        // while lookups used typeDiff.getTypeName(); if those names ever differed the
        // cache could never hit. Use a single consistent key for reads and writes.
        List<HollowUnmatchedObject> existingList = cache.putIfAbsent(cacheKey, list);
        return existingList != null ? existingList : list;
    }

    /** Breadcrumb trail: Overview → current type. */
    private List<HollowDiffUIBreadcrumbs> getBreadcrumbs(HollowTypeDiff typeDiff) {
        List<HollowDiffUIBreadcrumbs> breadcrumbs = new ArrayList<HollowDiffUIBreadcrumbs>();
        breadcrumbs.add(new HollowDiffUIBreadcrumbs((diffUI.getDiffUIPath() == null || diffUI.getDiffUIPath().length() == 0) ?
                "/" : diffUI.getDiffUIPath(), "Overview"));
        breadcrumbs.add(new HollowDiffUIBreadcrumbs(null, typeDiff.getTypeName()));
        return breadcrumbs;
    }
}
| 9,466 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/HollowHistoryRefreshListener.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.tools.history.HollowHistory;
/**
 * This can listener can be attached to either (a) a single consumer that always transitions in the forward direction of
 * increasing versions (v1, v2, v3, etc.), or (b) to two consumers that start at the same version but one always transitions
 * in the forward direction and the other always transitions in the reverse direction.
 *
 * This class synchronizes modifications to HollowHistory to prevent the two consumers (if applicable) from concurrently
 * invoking modifications on the underlying HollowHistory object.
 */
public class HollowHistoryRefreshListener extends HollowConsumer.AbstractRefreshListener {

    private final HollowHistory history;

    public HollowHistoryRefreshListener(HollowHistory history) {
        this.history = history;
    }

    /** A double snapshot replaces the latest state wholesale. Synchronized against the delta path. */
    @Override
    public synchronized void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
        history.doubleSnapshotOccurred(stateEngine, version);
    }

    /**
     * Routes a delta transition to the correct end of the history timeline. The reverse-direction
     * consumer is recognized by identity: its state engine is the history's oldest tracked state.
     */
    @Override
    public synchronized void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
        boolean isReverseTransition = stateEngine == history.getOldestState();
        if (isReverseTransition)
            history.reverseDeltaOccurred(version);
        else
            history.deltaOccurred(version);
    }

    // Remaining notifications are intentionally no-ops: history only reacts to completed transitions.
    @Override public void refreshStarted(long currentVersion, long requestedVersion) { }
    @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { }
    @Override public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) { }
    @Override public void blobLoaded(Blob transition) { }
}
| 9,467 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/HollowHistoryUI.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui;
import static com.netflix.hollow.ui.HollowUISession.getSession;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.diffview.DiffViewOutputGenerator;
import com.netflix.hollow.diffview.HollowHistoryViewProvider;
import com.netflix.hollow.diffview.HollowObjectViewProvider;
import com.netflix.hollow.diffview.effigy.CustomHollowEffigyFactory;
import com.netflix.hollow.diffview.effigy.HollowRecordDiffUI;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
import com.netflix.hollow.diffview.effigy.pairer.exact.HistoryExactRecordMatcher;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.history.ui.pages.HistoricalObjectDiffPage;
import com.netflix.hollow.history.ui.pages.HistoryOverviewPage;
import com.netflix.hollow.history.ui.pages.HistoryQueryPage;
import com.netflix.hollow.history.ui.pages.HistoryStatePage;
import com.netflix.hollow.history.ui.pages.HistoryStateTypeExpandGroupPage;
import com.netflix.hollow.history.ui.pages.HistoryStateTypePage;
import com.netflix.hollow.tools.history.HollowHistory;
import com.netflix.hollow.ui.HollowUIRouter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Web UI over a {@link HollowHistory}: routes requests to the overview, per-state,
 * per-type, query, and historical-object-diff pages, and exposes per-type
 * customization hooks (effigy factories, record namers, match hints).
 */
public class HollowHistoryUI extends HollowUIRouter implements HollowRecordDiffUI {

    private final HollowHistory history;

    // One renderer per UI view.
    private final HistoryOverviewPage overviewPage;
    private final HistoryStatePage statePage;
    private final HistoryStateTypePage stateTypePage;
    private final HistoryStateTypeExpandGroupPage stateTypeExpandPage;
    private final HistoryQueryPage queryPage;
    private final HistoricalObjectDiffPage objectDiffPage;

    private final HollowObjectViewProvider viewProvider;
    private final DiffViewOutputGenerator diffViewOutputGenerator;

    // Per-type customization hooks, keyed by type name.
    private final Map<String, CustomHollowEffigyFactory> customHollowEffigyFactories;
    private final Map<String, HollowHistoryRecordNamer> customHollowRecordNamers;
    private final Map<String, PrimaryKey> matchHints;
    private final TimeZone timeZone;

    private String[] overviewDisplayHeaders;

    /**
     * HollowHistoryUI constructor that builds history for a consumer that transitions forwards i.e. in increasing
     * version order (v1, v2, v3...). This constructor defaults max states to 1024 and time zone to PST.
     *
     * @param baseUrlPath url path for history UI endpoint
     * @param consumer HollowConsumer (already initialized with data) that will be traversing forward deltas or double snapshots
     */
    public HollowHistoryUI(String baseUrlPath, HollowConsumer consumer) {
        this(baseUrlPath, consumer, 1024, VersionTimestampConverter.PACIFIC_TIMEZONE);
    }

    public HollowHistoryUI(String baseUrlPath, HollowConsumer consumer, TimeZone timeZone) {
        this(baseUrlPath, consumer, 1024, timeZone);
    }

    public HollowHistoryUI(String baseUrlPath, HollowConsumer consumer, int numStatesToTrack, TimeZone timeZone) {
        this(baseUrlPath, createHistory(consumer, numStatesToTrack), timeZone);
    }

    /**
     * HollowHistoryUI that supports building history in both directions simultaneously.
     * Fwd and rev consumers should be initialized to the same version before calling this constructor.
     * This constructor defaults max states to 1024 and time zone to PST.
     *
     * @param baseUrlPath url path for history UI endpoint
     * @param consumerFwd HollowConsumer (already initialized with data) that will be traversing forward deltas or double snapshots
     * @param consumerRev HollowConsumer (also initialized to the same version as consumerFwd) that will be traversing reverse deltas
     */
    public HollowHistoryUI(String baseUrlPath, HollowConsumer consumerFwd, HollowConsumer consumerRev) {
        this(baseUrlPath, consumerFwd, consumerRev, 1024, VersionTimestampConverter.PACIFIC_TIMEZONE);
    }

    public HollowHistoryUI(String baseUrlPath, HollowConsumer consumerFwd, HollowConsumer consumerRev, int numStatesToTrack, TimeZone timeZone) {
        this(baseUrlPath, createHistory(consumerFwd, consumerRev, numStatesToTrack), timeZone);
    }

    public HollowHistoryUI(String baseUrlPath, HollowHistory history) {
        this(baseUrlPath, history, VersionTimestampConverter.PACIFIC_TIMEZONE);
    }

    public HollowHistoryUI(String baseUrlPath, HollowHistory history, TimeZone timeZone) {
        super(baseUrlPath);
        this.history = history;
        this.overviewPage = new HistoryOverviewPage(this);
        this.statePage = new HistoryStatePage(this);
        this.queryPage = new HistoryQueryPage(this);
        this.objectDiffPage = new HistoricalObjectDiffPage(this);
        this.stateTypePage = new HistoryStateTypePage(this);
        this.stateTypeExpandPage = new HistoryStateTypeExpandGroupPage(this);
        this.viewProvider = new HollowHistoryViewProvider(this);
        this.diffViewOutputGenerator = new DiffViewOutputGenerator(viewProvider);
        this.customHollowEffigyFactories = new HashMap<>();
        this.customHollowRecordNamers = new HashMap<>();
        this.matchHints = new HashMap<>();
        this.overviewDisplayHeaders = new String[0];
        this.timeZone = timeZone;
    }

    private static HollowHistory createHistory(HollowConsumer consumer, int numStatesToTrack) {
        return createHistory(consumer, null, numStatesToTrack);
    }

    /**
     * Builds a HollowHistory from the consumer(s) and attaches a refresh listener so history
     * tracks future transitions. Refresh locks are held while the current state engine and
     * version are read, so each consumer's snapshot is internally consistent.
     */
    private static HollowHistory createHistory(HollowConsumer consumerFwd, HollowConsumer consumerRev, int numStatesToTrack) {
        if (consumerRev == null) {
            consumerFwd.getRefreshLock().lock();
            try {
                HollowHistory history = new HollowHistory(consumerFwd.getStateEngine(), consumerFwd.getCurrentVersionId(), numStatesToTrack);
                consumerFwd.addRefreshListener(new HollowHistoryRefreshListener(history));
                return history;
            } finally {
                consumerFwd.getRefreshLock().unlock();
            }
        } else {
            consumerFwd.getRefreshLock().lock();
            consumerRev.getRefreshLock().lock();
            try {
                HollowHistory history = new HollowHistory(consumerFwd.getStateEngine(), consumerRev.getStateEngine(),
                        consumerFwd.getCurrentVersionId(), consumerRev.getCurrentVersionId(), numStatesToTrack);
                HollowHistoryRefreshListener listener = new HollowHistoryRefreshListener(history);
                consumerFwd.addRefreshListener(listener);
                consumerRev.addRefreshListener(listener);
                return history;
            } finally {
                // Release in reverse order of acquisition.
                consumerRev.getRefreshLock().unlock();
                consumerFwd.getRefreshLock().unlock();
            }
        }
    }

    public HollowHistory getHistory() {
        return history;
    }

    /**
     * Routes a request to the appropriate page renderer or AJAX endpoint.
     *
     * @return true if this UI handled the request, false otherwise
     */
    @Override
    public boolean handle(String target, HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        String pageName = getTargetRootPath(target);

        // AJAX endpoints for expanding/collapsing diff rows; they write their own response.
        if("diffrowdata".equals(pageName)) {
            diffViewOutputGenerator.uncollapseRow(req, resp);
            return true;
        } else if("collapsediffrow".equals(pageName)) {
            diffViewOutputGenerator.collapseRow(req, resp);
            return true;
        }

        resp.setContentType("text/html");

        if("resource".equals(pageName)) {
            if(serveResource(req, resp, getResourceName(target)))
                return true;
        } else if("".equals(pageName) || "overview".equals(pageName)) {
            if(isJsonRequest(req)) {
                overviewPage.sendJson(req, resp);
                return true;
            }
            overviewPage.render(req, getSession(req, resp), resp.getWriter());
            // BUG FIX: this branch previously fell through to "return false", reporting the
            // request unhandled even though the overview page had already been rendered.
            return true;
        } else if("state".equals(pageName)) {
            if(isJsonRequest(req)) {
                statePage.sendJson(req, resp);
                return true;
            }
            statePage.render(req, getSession(req, resp), resp.getWriter());
            return true;
        } else if("statetype".equals(pageName)) {
            if(isJsonRequest(req)) {
                stateTypePage.sendJson(req, getSession(req, resp), resp);
                return true;
            }
            stateTypePage.render(req, getSession(req, resp), resp.getWriter());
            return true;
        } else if("statetypeexpand".equals(pageName)) {
            stateTypeExpandPage.render(req, getSession(req, resp), resp.getWriter());
            return true;
        } else if("query".equals(pageName)) {
            queryPage.render(req, getSession(req, resp), resp.getWriter());
            return true;
        } else if("historicalObject".equals(pageName)) {
            objectDiffPage.render(req, getSession(req, resp), resp.getWriter());
            return true;
        }
        return false;
    }

    // True when the caller requested a JSON payload via ?format=json.
    private static boolean isJsonRequest(HttpServletRequest req) {
        return "json".equals(req.getParameter("format"));
    }

    public void addCustomHollowRecordNamer(String typeName, HollowHistoryRecordNamer recordNamer) {
        customHollowRecordNamers.put(typeName, recordNamer);
    }

    public void addCustomHollowEffigyFactory(String typeName, CustomHollowEffigyFactory factory) {
        customHollowEffigyFactories.put(typeName, factory);
    }

    @Override
    public CustomHollowEffigyFactory getCustomHollowEffigyFactory(String typeName) {
        return customHollowEffigyFactories.get(typeName);
    }

    public void addMatchHint(PrimaryKey matchHint) {
        this.matchHints.put(matchHint.getType(), matchHint);
    }

    @Override
    public Map<String, PrimaryKey> getMatchHints() {
        return matchHints;
    }

    public ExactRecordMatcher getExactRecordMatcher() {
        return HistoryExactRecordMatcher.INSTANCE;
    }

    public void setOverviewDisplayHeaders(String... displayHeaders) {
        this.overviewDisplayHeaders = displayHeaders;
    }

    /** Returns the custom namer registered for the type, or the default namer if none exists. */
    public HollowHistoryRecordNamer getHistoryRecordNamer(String typeName) {
        HollowHistoryRecordNamer recordNamer = customHollowRecordNamers.get(typeName);
        if(recordNamer == null)
            return HollowHistoryRecordNamer.DEFAULT_RECORD_NAMER;
        return recordNamer;
    }

    public String[] getOverviewDisplayHeaders() {
        return overviewDisplayHeaders;
    }

    public HollowObjectViewProvider getViewProvider() {
        return viewProvider;
    }

    public TimeZone getTimeZone() {
        return timeZone;
    }
}
| 9,468 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/VersionTimestampConverter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
/**
 * Renders Hollow version longs of the form yyyyMMddHHmmssSSS (interpreted as UTC)
 * as a short "[MM/dd HH:mm z] " display string in a caller-supplied time zone.
 * Versions that do not parse as timestamps are echoed back unchanged.
 */
public class VersionTimestampConverter {

    public static final TimeZone PACIFIC_TIMEZONE = TimeZone.getTimeZone("America/Los_Angeles");
    private static final TimeZone UTC_TIMEZONE = TimeZone.getTimeZone("UTC");

    // Global offset applied to every rendered timestamp. volatile: written once by
    // configuration code, read by concurrent UI request threads.
    private static volatile long ADD_MILLIS_TO_TIMESTAMP = 0;

    /**
     * Sets a global millisecond offset applied to all subsequently rendered timestamps.
     *
     * @param millis offset to add (may be negative)
     */
    public static void addMillisToTimestamps(long millis) {
        ADD_MILLIS_TO_TIMESTAMP = millis;
    }

    /**
     * Formats a version long as "[MM/dd HH:mm z] " in the given zone, or returns the raw
     * version string if it is not a parseable yyyyMMddHHmmssSSS timestamp.
     *
     * @param versionLong version expected to encode a UTC timestamp
     * @param timeZone zone in which to display the timestamp
     * @return display string, or String.valueOf(versionLong) when parsing fails
     */
    public static String getTimestamp(long versionLong, TimeZone timeZone) {
        String version = String.valueOf(versionLong);
        // SimpleDateFormat is not thread-safe, so instances are created per call rather than cached.
        SimpleDateFormat utcFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS");
        utcFormat.setTimeZone(UTC_TIMEZONE);
        try {
            Date date = utcFormat.parse(version);
            Date adjustedDate = new Date(date.getTime() + ADD_MILLIS_TO_TIMESTAMP);
            SimpleDateFormat displayFormat = new SimpleDateFormat("[MM/dd HH:mm z] ");
            displayFormat.setTimeZone(timeZone);
            return displayFormat.format(adjustedDate);
        } catch (ParseException ignored) {
            // Version is not a timestamp-encoded long; fall through and show the raw value.
        }
        return String.valueOf(versionLong);
    }
}
| 9,469 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/HollowHistoryUIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.tools.history.HollowHistory;
import com.netflix.hollow.ui.HollowUIWebServer;
import com.netflix.hollow.ui.HttpHandlerWithServletSupport;
import java.util.TimeZone;
/**
 * Embedded web server hosting a {@link HollowHistoryUI}. The constructor overloads mirror
 * those of HollowHistoryUI; each one ultimately delegates to the (ui, port) constructor.
 */
public class HollowHistoryUIServer {

    private final HollowUIWebServer webServer;
    private final HollowHistoryUI historyUI;

    /**
     * HollowHistoryUIServer that builds history using a consumer that transitions forwards i.e. in increasing version
     * order (v1, v2, v3...). This constructor defaults time zone to PST.
     *
     * @param consumer HollowConsumer (already initialized with data) that will be traversing forward deltas
     * @param port server port
     */
    public HollowHistoryUIServer(HollowConsumer consumer, int port) {
        this(new HollowHistoryUI("", consumer), port);
    }

    /**
     * Serves HollowHistoryUI that supports building history in both directions simultaneously.
     * Fwd and rev consumers should be initialized to the same version before calling this constructor.
     * Attempting double snapshots or forward version transitions on consumerRev will have unintended consequences on history.
     * This constructor defaults max states to 1024 and time zone to PST.
     *
     * @param consumerFwd HollowConsumer (already initialized with data) that will be traversing forward deltas
     * @param consumerRev HollowConsumer (also initialized to the same version as consumerFwd) that will be traversing reverse deltas
     * @param port server port
     */
    public HollowHistoryUIServer(HollowConsumer consumerFwd, HollowConsumer consumerRev, int port) {
        this(consumerFwd, consumerRev, 1024, port, VersionTimestampConverter.PACIFIC_TIMEZONE);
    }

    public HollowHistoryUIServer(HollowConsumer consumerFwd, HollowConsumer consumerRev, int numStatesToTrack, int port, TimeZone timeZone) {
        this(new HollowHistoryUI("", consumerFwd, consumerRev, numStatesToTrack, timeZone), port);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int port, TimeZone timeZone) {
        this(new HollowHistoryUI("", consumer, timeZone), port);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int numStatesToTrack, int port, TimeZone timeZone) {
        this(new HollowHistoryUI("", consumer, numStatesToTrack, timeZone), port);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int numStatesToTrack, int port) {
        this(new HollowHistoryUI("", consumer, numStatesToTrack, VersionTimestampConverter.PACIFIC_TIMEZONE), port);
    }

    public HollowHistoryUIServer(HollowHistory history, int port) {
        this(new HollowHistoryUI("", history), port);
    }

    public HollowHistoryUIServer(HollowHistoryUI ui, int port) {
        this.webServer = new HollowUIWebServer(new HttpHandlerWithServletSupport(ui), port);
        this.historyUI = ui;
    }

    /** Starts the server; returns this for call chaining. */
    public HollowHistoryUIServer start() throws Exception {
        webServer.start();
        return this;
    }

    /** Blocks the calling thread until the server exits; returns this for call chaining. */
    public HollowHistoryUIServer join() throws InterruptedException {
        webServer.join();
        return this;
    }

    public void stop() throws Exception {
        webServer.stop();
    }

    public HollowHistoryUI getUI() {
        return historyUI;
    }
}
| 9,470 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/jetty/HollowHistoryUIServer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.jetty;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.VersionTimestampConverter;
import com.netflix.hollow.tools.history.HollowHistory;
import java.util.TimeZone;
/**
 * @deprecated use {@link com.netflix.hollow.history.ui.HollowHistoryUIServer}. This is deprecated because package name
 *             contains "jetty" but jetty-server dep is no longer required. Instead, this class lives on as an adapter
 *             over {@link com.netflix.hollow.history.ui.HollowHistoryUIServer}.
 */
@Deprecated
public class HollowHistoryUIServer {

    // Every operation is forwarded to the non-jetty implementation.
    private final com.netflix.hollow.history.ui.HollowHistoryUIServer delegate;

    /**
     * HollowHistoryUIServer that builds history using a consumer that transitions forwards i.e. in increasing version
     * order (v1, v2, v3...). This constructor defaults time zone to PST.
     *
     * @param consumer HollowConsumer (already initialized with data) that will be traversing forward deltas
     * @param port server port
     */
    public HollowHistoryUIServer(HollowConsumer consumer, int port) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(consumer, port);
    }

    /**
     * Serves HollowHistoryUI that supports building history in both directions simultaneously.
     * Fwd and rev consumers should be initialized to the same version before calling this constructor.
     * Attempting double snapshots or forward version transitions on consumerRev will have unintended consequences on history.
     * This constructor defaults max states to 1024 and time zone to PST.
     *
     * @param consumerFwd HollowConsumer (already initialized with data) that will be traversing forward deltas
     * @param consumerRev HollowConsumer (also initialized to the same version as consumerFwd) that will be traversing reverse deltas
     * @param port server port
     */
    public HollowHistoryUIServer(HollowConsumer consumerFwd, HollowConsumer consumerRev, int port) {
        this(consumerFwd, consumerRev, 1024, port, VersionTimestampConverter.PACIFIC_TIMEZONE);
    }

    public HollowHistoryUIServer(HollowConsumer consumerFwd, HollowConsumer consumerRev, int numStatesToTrack, int port, TimeZone timeZone) {
        this(new HollowHistoryUI("", consumerFwd, consumerRev, numStatesToTrack, timeZone), port);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int port, TimeZone timeZone) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(consumer, port, timeZone);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int numStatesToTrack, int port, TimeZone timeZone) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(consumer, numStatesToTrack, port, timeZone);
    }

    public HollowHistoryUIServer(HollowConsumer consumer, int numStatesToTrack, int port) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(consumer, numStatesToTrack, port);
    }

    public HollowHistoryUIServer(HollowHistory history, int port) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(history, port);
    }

    public HollowHistoryUIServer(HollowHistoryUI ui, int port) {
        delegate = new com.netflix.hollow.history.ui.HollowHistoryUIServer(ui, port);
    }

    public HollowHistoryUIServer start() throws Exception {
        delegate.start();
        return this;
    }

    public HollowHistoryUIServer join() throws InterruptedException {
        delegate.join();
        return this;
    }

    public void stop() throws Exception {
        delegate.stop();
    }

    public HollowHistoryUI getUI() {
        return delegate.getUI();
    }
}
| 9,471 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/naming/HollowHistoryRecordNamer.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.naming;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
/**
 * Produces human-readable display names for records in the history UI.
 * Subclasses override {@link #getRecordName(HollowObjectTypeDataAccess, int)} to supply
 * custom names; the default falls back to the record's key display string.
 */
public class HollowHistoryRecordNamer {

    public static final HollowHistoryRecordNamer DEFAULT_RECORD_NAMER = new HollowHistoryRecordNamer();

    public String getRecordName(HollowHistoricalState historicalState, HollowHistoricalStateTypeKeyOrdinalMapping typeKeyMapping, int keyOrdinal, HollowObjectTypeDataAccess dataAccess, int recordOrdinal) {
        String overrideName = getRecordName(dataAccess, recordOrdinal);
        return overrideName != null
                ? overrideName
                : typeKeyMapping.getKeyIndex().getKeyDisplayString(keyOrdinal);
    }

    public String getRecordName(HollowObjectTypeDataAccess dataAccess, int recordOrdinal) {
        return null; // default: no custom name; caller falls back to the key display string
    }

    public String getKeyFieldName(HollowHistoricalState historicalState, Object o, int keyFieldIdx) {
        return String.valueOf(o);
    }
}
| 9,472 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/HistoryStateTypeChanges.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.util.IntMap.IntMapEntryIterator;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import java.util.Arrays;
/**
 * View model summarizing every record change for a single type within one historical
 * state transition, partitioned into added / removed / modified buckets. Records may
 * additionally be bucketed into a tree keyed by the values of selected key fields
 * ("grouped fields").
 */
public class HistoryStateTypeChanges {

    // Version of the historical state these changes belong to.
    private final long stateVersion;
    private final String typeName;
    // Names of key fields used to group records into sub-trees; may be empty.
    private final String groupedFieldNames[];
    // Root tree nodes for the three change buckets.
    private final RecordDiffTreeNode modifiedRecords;
    private final RecordDiffTreeNode addedRecords;
    private final RecordDiffTreeNode removedRecords;

    public HistoryStateTypeChanges(HollowHistoricalState historicalState, String typeName, HollowHistoryRecordNamer recordNamer, String... groupedFieldNames) {
        this.stateVersion = historicalState.getVersion();
        this.typeName = typeName;
        this.groupedFieldNames = groupedFieldNames;
        this.modifiedRecords = new RecordDiffTreeNode("", "Modified", "Modified", historicalState, recordNamer);
        this.addedRecords = new RecordDiffTreeNode("", "Added", "Added", historicalState, recordNamer);
        this.removedRecords = new RecordDiffTreeNode("", "Removed", "Removed", historicalState, recordNamer);

        HollowHistoricalStateTypeKeyOrdinalMapping typeKeyMapping = historicalState.getKeyOrdinalMapping().getTypeMappings().get(typeName);
        HollowObjectTypeDataAccess dataAccess = (HollowObjectTypeDataAccess) historicalState.getDataAccess().getTypeDataAccess(typeName);
        // Map each grouped field name to its index among the type's key fields (-1 if not a key field).
        int[] groupedFieldIndexes = getGroupedFieldIndexes(groupedFieldNames, typeKeyMapping.getKeyIndex().getKeyFields());

        IntMapEntryIterator removedIter = typeKeyMapping.removedOrdinalMappingIterator();
        IntMapEntryIterator addedIter = typeKeyMapping.addedOrdinalMappingIterator();

        // First pass: each key with a removed ordinal is either modified (it also has an
        // added ordinal in the destination state) or removed outright.
        while(removedIter.next()) {
            int fromOrdinal = removedIter.getValue();
            int toOrdinal = typeKeyMapping.findAddedOrdinal(removedIter.getKey());
            if(toOrdinal != -1) {
                addRecordDiff(modifiedRecords, historicalState, typeKeyMapping, recordNamer, dataAccess, removedIter.getKey(), fromOrdinal, toOrdinal, groupedFieldIndexes);
            } else {
                addRecordDiff(removedRecords, historicalState, typeKeyMapping, recordNamer, dataAccess, removedIter.getKey(), fromOrdinal, toOrdinal, groupedFieldIndexes);
            }
        }

        // Second pass: keys that were added and never removed are pure additions
        // (modified keys were already classified in the first pass).
        while(addedIter.next()) {
            if(typeKeyMapping.findRemovedOrdinal(addedIter.getKey()) == -1) {
                int toOrdinal = addedIter.getValue();
                addRecordDiff(addedRecords, historicalState, typeKeyMapping, recordNamer, dataAccess, addedIter.getKey(), -1, toOrdinal, groupedFieldIndexes);
            }
        }
    }

    // Descends (creating as needed) one child node per grouped key-field value, then
    // attaches the record diff at the resulting leaf. Note the loop reassigns the
    // local 'node' parameter as it walks the tree.
    private void addRecordDiff(RecordDiffTreeNode node, HollowHistoricalState historicalState, HollowHistoricalStateTypeKeyOrdinalMapping typeKeyMapping, HollowHistoryRecordNamer recordNamer, HollowObjectTypeDataAccess dataAccess, int keyOrdinal, int fromOrdinal, int toOrdinal, int[] fieldGroupIndexes) {
        for(int i=0;i<fieldGroupIndexes.length;i++) {
            node = node.getChildNode(typeKeyMapping.getKeyIndex().getKeyFieldValue(fieldGroupIndexes[i], keyOrdinal), fieldGroupIndexes[i]);
        }
        node.addRecordDiff(new RecordDiff(historicalState, recordNamer, typeKeyMapping, dataAccess, keyOrdinal, fromOrdinal, toOrdinal));
    }

    public long getStateVersion() {
        return stateVersion;
    }

    public String getTypeName() {
        return typeName;
    }

    public String[] getGroupedFieldNames() {
        return groupedFieldNames;
    }

    public RecordDiffTreeNode getModifiedRecords() {
        return modifiedRecords;
    }

    public RecordDiffTreeNode getAddedRecords() {
        return addedRecords;
    }

    public RecordDiffTreeNode getRemovedRecords() {
        return removedRecords;
    }

    /** True when no records of this type were added, removed, or modified in this transition. */
    public boolean isEmpty() {
        return modifiedRecords.isEmpty() && addedRecords.isEmpty() && removedRecords.isEmpty();
    }

    // Resolves each grouped field name to its position in the key-field array;
    // entries remain -1 when the name does not match any key field.
    private int[] getGroupedFieldIndexes(String groupedFieldNames[], String[] keyFields) {
        int[] groupedFieldIndexes = new int[groupedFieldNames.length];
        Arrays.fill(groupedFieldIndexes, -1);
        for(int i=0;i<groupedFieldNames.length;i++) {
            for(int j=0;j<keyFields.length;j++) {
                if(groupedFieldNames[i].equals(keyFields[j])) {
                    groupedFieldIndexes[i] = j;
                }
            }
        }
        return groupedFieldIndexes;
    }

    /** Searches all three buckets for the tree node with the given hierarchical field name. */
    public RecordDiffTreeNode findTreeNode(String hierarchicalFieldName) {
        RecordDiffTreeNode node = findTreeNode(modifiedRecords, hierarchicalFieldName);
        if(node != null)
            return node;
        node = findTreeNode(addedRecords, hierarchicalFieldName);
        if(node != null)
            return node;
        return findTreeNode(removedRecords, hierarchicalFieldName);
    }

    // Depth-first search of one bucket's tree for a matching hierarchical field name.
    private RecordDiffTreeNode findTreeNode(RecordDiffTreeNode treeNode, String hierarchicalFieldName) {
        if(treeNode.getHierarchicalFieldName().equals(hierarchicalFieldName))
            return treeNode;
        for(RecordDiffTreeNode child : treeNode.getSubGroups()) {
            RecordDiffTreeNode matchedDescendent = findTreeNode(child, hierarchicalFieldName);
            if(matchedDescendent != null)
                return matchedDescendent;
        }
        return null;
    }
}
| 9,473 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/HistoryStateQueryMatches.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Query results for one historical state: for each type with at least one matching key,
 * the matching records bucketed into modified / added / removed lists.
 */
public class HistoryStateQueryMatches {

    private final long stateVersion;
    private final String dateDisplayString;
    private final List<TypeMatches> typeMatches;

    public HistoryStateQueryMatches(HollowHistoricalState historicalState, HollowHistoryUI ui, String dateDisplayString, Map<String, IntList> perTypeQueryMatchingKeys) {
        this.stateVersion = historicalState.getVersion();
        this.dateDisplayString = dateDisplayString;
        this.typeMatches = new ArrayList<TypeMatches>(historicalState.getKeyOrdinalMapping().getTypeMappings().size());

        for(Map.Entry<String, IntList> entry : perTypeQueryMatchingKeys.entrySet()) {
            String queriedType = entry.getKey();
            TypeMatches matches = new TypeMatches(historicalState, ui.getHistoryRecordNamer(queriedType), queriedType, entry.getValue());
            // Only surface types that actually changed in this state.
            if(matches.hasMatches())
                typeMatches.add(matches);
        }
    }

    public boolean hasMatches() {
        return !typeMatches.isEmpty();
    }

    public long getStateVersion() {
        return stateVersion;
    }

    public String getDateDisplayString() {
        return dateDisplayString;
    }

    public List<TypeMatches> getTypeMatches() {
        return typeMatches;
    }

    /** Matching records for a single type, bucketed by the kind of change. */
    public static class TypeMatches {

        private final String type;
        private final List<RecordDiff> modifiedRecords;
        private final List<RecordDiff> removedRecords;
        private final List<RecordDiff> addedRecords;

        public TypeMatches(HollowHistoricalState historicalState, HollowHistoryRecordNamer recordNamer, String type, IntList queryMatchingKeys) {
            this.type = type;
            this.modifiedRecords = new ArrayList<RecordDiff>();
            this.removedRecords = new ArrayList<RecordDiff>();
            this.addedRecords = new ArrayList<RecordDiff>();

            HollowHistoricalStateTypeKeyOrdinalMapping keyMapping = historicalState.getKeyOrdinalMapping().getTypeMapping(type);
            if (keyMapping == null)
                return; // type absent from this state: leave all buckets empty

            HollowObjectTypeDataAccess typeDataAccess = (HollowObjectTypeDataAccess) historicalState.getDataAccess().getTypeDataAccess(type);

            for(int i = 0; i < queryMatchingKeys.size(); i++) {
                int key = queryMatchingKeys.get(i);
                int removedOrdinal = keyMapping.findRemovedOrdinal(key);
                int addedOrdinal = keyMapping.findAddedOrdinal(key);

                boolean wasRemoved = removedOrdinal != -1;
                boolean wasAdded = addedOrdinal != -1;
                if(!wasRemoved && !wasAdded)
                    continue; // key untouched in this state

                RecordDiff diff = new RecordDiff(historicalState, recordNamer, keyMapping, typeDataAccess, key, removedOrdinal, addedOrdinal);
                if(wasRemoved && wasAdded)
                    modifiedRecords.add(diff);
                else if(wasRemoved)
                    removedRecords.add(diff);
                else
                    addedRecords.add(diff);
            }
        }

        public boolean hasMatches() {
            return !(modifiedRecords.isEmpty() && addedRecords.isEmpty() && removedRecords.isEmpty());
        }

        public String getType() {
            return type;
        }

        public List<RecordDiff> getModifiedRecords() {
            return modifiedRecords;
        }

        public List<RecordDiff> getAddedRecords() {
            return addedRecords;
        }

        public List<RecordDiff> getRemovedRecords() {
            return removedRecords;
        }
    }
}
| 9,474 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/HistoricalObjectChangeVersion.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
/**
 * A single version at which a particular historical object changed, paired
 * with its human-readable timestamp for display in the history UI.
 */
public class HistoricalObjectChangeVersion {

    private final long versionId;
    private final String dateDisplayString;

    public HistoricalObjectChangeVersion(long versionId, String dateDisplayString) {
        this.dateDisplayString = dateDisplayString;
        this.versionId = versionId;
    }

    /** @return the state version in which the change occurred */
    public long getVersionId() {
        return versionId;
    }

    /** @return the display timestamp corresponding to {@link #getVersionId()} */
    public String getDateDisplayString() {
        return dateDisplayString;
    }
}
| 9,475 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/HistoryOverviewRow.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.history.ui.pages.HistoryOverviewPage.ChangeBreakdown;
import java.util.Map;
/**
 * One row of the history overview table: a single state version, its display
 * timestamp, the total change breakdown, the per-type breakdowns, and the
 * values for any configured extra display-header columns.
 */
public class HistoryOverviewRow {

    private final String dateDisplayString;
    private final long version;
    private final Map<String, ChangeBreakdown> topLevelChangesByType;
    private final ChangeBreakdown topLevelChanges;
    private final String[] overviewDisplayHeaderValues;

    public HistoryOverviewRow(String dateDisplayString, long version, ChangeBreakdown topLevelChanges, Map<String, ChangeBreakdown> topLevelChangesByType, String[] overviewDisplayHeaderValues) {
        this.version = version;
        this.dateDisplayString = dateDisplayString;
        this.topLevelChangesByType = topLevelChangesByType;
        this.topLevelChanges = topLevelChanges;
        this.overviewDisplayHeaderValues = overviewDisplayHeaderValues;
    }

    public String getDateDisplayString() {
        return dateDisplayString;
    }

    public long getVersion() {
        return version;
    }

    public Map<String, ChangeBreakdown> getTopLevelChangesByType() {
        return topLevelChangesByType;
    }

    public ChangeBreakdown getTopLevelChanges() {
        return topLevelChanges;
    }

    public String[] getOverviewDisplayHeaderValues() {
        return overviewDisplayHeaderValues;
    }
}
| 9,476 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/RecordDiffTreeNode.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A node in the grouped record-diff tree: either an interior grouping node
 * (keyed by a group-by field value, with child nodes) or a leaf carrying
 * the {@link RecordDiff}s that fell into this group.
 */
public class RecordDiffTreeNode {

    private final String hierarchicalFieldName;
    private final HollowHistoricalState historicalState;
    private final HollowHistoryRecordNamer recordNamer;
    private final String groupName;
    private final Map<Object, RecordDiffTreeNode> childNodes;
    private final List<RecordDiff> recordDiffs;

    public RecordDiffTreeNode(String parentHierarchicalFieldName, Object groupIdentifier, String groupName, HollowHistoricalState historicalState, HollowHistoryRecordNamer recordNamer) {
        // Hierarchical name is the dotted path of group identifiers from the root.
        this.hierarchicalFieldName = parentHierarchicalFieldName + "." + groupIdentifier;
        this.historicalState = historicalState;
        this.recordNamer = recordNamer;
        this.groupName = groupName;
        this.childNodes = new HashMap<Object, RecordDiffTreeNode>();
        this.recordDiffs = new ArrayList<RecordDiff>();
    }

    public String getHierarchicalFieldName() {
        return hierarchicalFieldName;
    }

    public String getGroupName() {
        return groupName;
    }

    /** @return true if this node has at least one child grouping node */
    public boolean hasSubGroups() {
        return !childNodes.isEmpty();
    }

    /**
     * Returns the child node for the given group-by value, creating (and
     * registering) it on first access.
     */
    public RecordDiffTreeNode getChildNode(Object value, int keyFieldIdx) {
        RecordDiffTreeNode existing = childNodes.get(value);
        if(existing != null)
            return existing;

        String childGroupName = recordNamer.getKeyFieldName(historicalState, value, keyFieldIdx);
        RecordDiffTreeNode created = new RecordDiffTreeNode(hierarchicalFieldName, value, childGroupName, historicalState, recordNamer);
        childNodes.put(value, created);
        return created;
    }

    public void addRecordDiff(RecordDiff diff) {
        recordDiffs.add(diff);
    }

    public List<RecordDiff> getRecordDiffs() {
        return recordDiffs;
    }

    public boolean isEmpty() {
        return recordDiffs.isEmpty() && childNodes.isEmpty();
    }

    /** @return the number of diffs in this node plus all descendants (recursive) */
    public int getDiffCount() {
        int count = recordDiffs.size();
        for(RecordDiffTreeNode child : childNodes.values())
            count += child.getDiffCount();
        return count;
    }

    public Collection<RecordDiffTreeNode> getSubGroups() {
        return childNodes.values();
    }
}
| 9,477 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/RecordDiff.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
/**
 * The diff of a single record within one historical state: the key ordinal
 * identifying the record, and its from/to ordinals in that state (-1 when
 * absent on one side).
 */
public class RecordDiff implements Comparable<RecordDiff> {

    private final HollowHistoryRecordNamer recordNamer;
    private final HollowHistoricalState historicalState;
    private final HollowHistoricalStateTypeKeyOrdinalMapping typeKeyOrdinalMapping;
    private final HollowObjectTypeDataAccess typeDataAccess;
    private final int keyOrdinal;
    private final int fromOrdinal;
    private final int toOrdinal;

    public RecordDiff(HollowHistoricalState historicalState, HollowHistoryRecordNamer recordNamer, HollowHistoricalStateTypeKeyOrdinalMapping typeKeyOrdinalMapping, HollowObjectTypeDataAccess typeDataAccess, int keyOrdinal, int fromOrdinal, int toOrdinal) {
        this.historicalState = historicalState;
        this.recordNamer = recordNamer;
        this.typeKeyOrdinalMapping = typeKeyOrdinalMapping;
        this.typeDataAccess = typeDataAccess;
        this.keyOrdinal = keyOrdinal;
        this.fromOrdinal = fromOrdinal;
        this.toOrdinal = toOrdinal;
    }

    public int getKeyOrdinal() {
        return keyOrdinal;
    }

    /** @return the display name for this record, preferring the "to" side when it exists */
    public String getIdentifierString() {
        int displayOrdinal = (toOrdinal != -1) ? toOrdinal : fromOrdinal;
        return recordNamer.getRecordName(historicalState, typeKeyOrdinalMapping, keyOrdinal, typeDataAccess, displayOrdinal);
    }

    public int getFromOrdinal() {
        return fromOrdinal;
    }

    public int getToOrdinal() {
        return toOrdinal;
    }

    @Override
    public int compareTo(RecordDiff other) {
        // Comparison is deliberately reversed (other vs. this), so natural
        // ordering sorts diffs by identifier string in descending order.
        // NOTE(review): compareTo is not consistent with equals (equals/hashCode
        // are not overridden) — avoid using RecordDiff in sorted sets/maps.
        return other.getIdentifierString().compareTo(getIdentifierString());
    }
}
| 9,478 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/model/HistoryStateTypeChangeSummary.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.model;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
/**
 * A per-type summary of the number of modified / added / removed records in
 * one historical state, snapshotted from the type's key-ordinal mapping.
 */
public class HistoryStateTypeChangeSummary {

    private final long stateVersion;
    private final String typeName;
    private final int modifications;
    private final int additions;
    private final int removals;

    public HistoryStateTypeChangeSummary(long stateVersion, String typeName, HollowHistoricalStateTypeKeyOrdinalMapping mapping) {
        this.stateVersion = stateVersion;
        this.typeName = typeName;
        // Snapshot the counts at construction time.
        this.modifications = mapping.getNumberOfModifiedRecords();
        this.additions = mapping.getNumberOfNewRecords();
        this.removals = mapping.getNumberOfRemovedRecords();
    }

    public long getVersion() {
        return stateVersion;
    }

    public String getTypeName() {
        return typeName;
    }

    /** @return the sum of modifications, additions, and removals */
    public int getTotalChanges() {
        return modifications + additions + removals;
    }

    public int getModifications() {
        return modifications;
    }

    public int getAdditions() {
        return additions;
    }

    public int getRemovals() {
        return removals;
    }

    /** @return true if every individual count is zero */
    public boolean isEmpty() {
        return additions == 0 && removals == 0 && modifications == 0;
    }
}
| 9,479 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryStateTypePage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.google.gson.Gson;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.model.HistoryStateTypeChanges;
import com.netflix.hollow.history.ui.model.RecordDiff;
import com.netflix.hollow.history.ui.model.RecordDiffTreeNode;
import com.netflix.hollow.history.ui.naming.HollowHistoryRecordNamer;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.ui.HollowUISession;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.VelocityContext;
/**
 * Page showing the changes to a single type within one historical state,
 * rendered either as HTML (via the velocity template) or as JSON
 * ({@link #sendJson}).
 */
public class HistoryStateTypePage extends HistoryPage {

    private static final String STATE_TYPE_CHANGES_SESSION_ATTRIBUTE_NAME = "HISTORY_STATE_TYPE_CHANGES";

    public HistoryStateTypePage(HollowHistoryUI ui) {
        super(ui, "history-state-type.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        long version = Long.parseLong(req.getParameter("version"));
        HistoryStateTypeChanges typeChange = getStateTypeChanges(req, session, ui);
        HollowHistoricalState historicalState = ui.getHistory().getHistoricalState(version);
        List<String> groupByOptions = remainingGroupByOptions(historicalState, req.getParameter("type"), typeChange);

        ctx.put("typeChange", typeChange);
        ctx.put("headerEntries", getHeaderEntries(historicalState));
        ctx.put("groupBy", req.getParameter("groupBy") == null ? "" : req.getParameter("groupBy"));
        ctx.put("groupByOptions", groupByOptions);
    }

    /**
     * Writes the type's changes as JSON: the remaining group-by options plus
     * the addition / modification / removal entries.
     */
    public void sendJson(HttpServletRequest request, HollowUISession session, HttpServletResponse response) {
        long version = Long.parseLong(request.getParameter("version"));
        HistoryStateTypeChanges typeChange = getStateTypeChanges(request, session, ui);
        HollowHistoricalState historicalState = ui.getHistory().getHistoricalState(version);
        List<String> groupByOptions = remainingGroupByOptions(historicalState, request.getParameter("type"), typeChange);

        Map<String, List<List<String>>> changes = new LinkedHashMap<String, List<List<String>>>();
        List<List<String>> groups = new ArrayList<List<String>>();
        groups.add(groupByOptions);
        changes.put("groups", groups);
        changes.put("additions", changeEntries(typeChange.getAddedRecords()));
        changes.put("modifications", changeEntries(typeChange.getModifiedRecords()));
        changes.put("removals", changeEntries(typeChange.getRemovedRecords()));

        try {
            PrintWriter out = response.getWriter();
            Gson gson = new Gson();
            String json = gson.toJson(changes, changes.getClass());
            out.println(json);
        } catch(IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds the JSON entries for one bucket of changes. Each entry is a
     * two-element list: when the bucket has no sub-groups, it is
     * [identifier, keyOrdinal]; when grouped, it is
     * ["groupName(count)", hierarchicalFieldName]. An empty bucket yields an
     * empty list.
     */
    private static List<List<String>> changeEntries(RecordDiffTreeNode bucket) {
        List<List<String>> idRecords = new ArrayList<List<String>>();
        if(bucket.isEmpty())
            return idRecords;

        if(!bucket.hasSubGroups()) {
            for(RecordDiff diff : bucket.getRecordDiffs()) {
                List<String> data = new ArrayList<String>();
                data.add(diff.getIdentifierString());
                data.add(Integer.toString(diff.getKeyOrdinal())); // was `new Integer(..)`: deprecated boxing ctor
                idRecords.add(data);
            }
        } else {
            for(RecordDiffTreeNode changeGroup : bucket.getSubGroups()) {
                List<String> data = new ArrayList<String>();
                data.add(changeGroup.getGroupName() + "(" + changeGroup.getDiffCount() + ")");
                data.add(changeGroup.getHierarchicalFieldName());
                idRecords.add(data);
            }
        }
        return idRecords;
    }

    /**
     * Computes the key fields of the type that are not already grouped on,
     * i.e. the still-available "group by" choices.
     */
    private static List<String> remainingGroupByOptions(HollowHistoricalState historicalState, String type, HistoryStateTypeChanges typeChange) {
        List<String> groupByOptions = new ArrayList<String>(Arrays.asList(historicalState.getKeyOrdinalMapping().getTypeMapping(type).getKeyIndex().getKeyFields()));
        groupByOptions.removeAll(Arrays.asList(typeChange.getGroupedFieldNames()));
        return groupByOptions;
    }

    /**
     * Returns the {@link HistoryStateTypeChanges} for the requested
     * version/type/groupBy, reusing the session-cached instance when it
     * matches and rebuilding (and re-caching) it otherwise.
     */
    public static HistoryStateTypeChanges getStateTypeChanges(HttpServletRequest req, HollowUISession session, HollowHistoryUI ui) {
        HistoryStateTypeChanges typeChanges = (HistoryStateTypeChanges) session.getAttribute(STATE_TYPE_CHANGES_SESSION_ATTRIBUTE_NAME);

        long version = Long.parseLong(req.getParameter("version"));
        String type = req.getParameter("type");
        String groupBy = req.getParameter("groupBy");
        String[] groupedFieldNames = getGroupedFieldNames(groupBy);

        if(typeChanges == null
                || version != typeChanges.getStateVersion()
                || !type.equals(typeChanges.getTypeName())
                || !Arrays.equals(groupedFieldNames, typeChanges.getGroupedFieldNames())) {
            HollowHistoricalState historicalState = ui.getHistory().getHistoricalState(version);
            HollowHistoryRecordNamer recordNamer = ui.getHistoryRecordNamer(type);
            typeChanges = new HistoryStateTypeChanges(historicalState, type, recordNamer, groupedFieldNames);
            session.setAttribute(STATE_TYPE_CHANGES_SESSION_ATTRIBUTE_NAME, typeChanges);
        }

        return typeChanges;
    }

    /** Splits the comma-separated groupBy parameter; null yields an empty array. */
    private static String[] getGroupedFieldNames(String groupBy) {
        if(groupBy == null)
            return new String[0];
        return groupBy.split(",");
    }
}
| 9,480 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryOverviewPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.google.gson.Gson;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.VersionTimestampConverter;
import com.netflix.hollow.history.ui.model.HistoryOverviewRow;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import com.netflix.hollow.ui.HollowUISession;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.VelocityContext;
/**
 * The history overview page: one row per historical state, each with a
 * total change breakdown and per-type breakdowns. Renders as HTML or as
 * JSON via {@link #sendJson}.
 */
public class HistoryOverviewPage extends HistoryPage {

    public HistoryOverviewPage(HollowHistoryUI ui) {
        super(ui, "history-overview.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        ctx.put("overviewDisplayHeaders", ui.getOverviewDisplayHeaders());
        ctx.put("overviewRows", getHistoryOverview());
    }

    /** Serializes the overview rows as a JSON array. */
    public void sendJson(HttpServletRequest request, HttpServletResponse response) {
        List<HistoryOverviewRow> rows = getHistoryOverview();
        try {
            PrintWriter out = response.getWriter();
            out.println(new Gson().toJson(rows));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Builds one overview row per historical state, in state order. */
    private List<HistoryOverviewRow> getHistoryOverview() {
        List<HistoryOverviewRow> rows = new ArrayList<HistoryOverviewRow>();

        for(HollowHistoricalState state : ui.getHistory().getHistoricalStates()) {
            ChangeBreakdown totalBreakdown = new ChangeBreakdown();
            Map<String, ChangeBreakdown> breakdownByType = new HashMap<String, ChangeBreakdown>();

            for(Map.Entry<String, HollowHistoricalStateTypeKeyOrdinalMapping> typeEntry : state.getKeyOrdinalMapping().getTypeMappings().entrySet()) {
                breakdownByType.put(typeEntry.getKey(), new ChangeBreakdown(typeEntry.getValue()));
                totalBreakdown.accumulate(typeEntry.getValue());
            }

            String timestamp = VersionTimestampConverter.getTimestamp(state.getVersion(), ui.getTimeZone());
            String[] headerValues = getOverviewDisplayHeaderValues(state, ui.getOverviewDisplayHeaders());
            rows.add(new HistoryOverviewRow(timestamp, state.getVersion(), totalBreakdown, breakdownByType, headerValues));
        }

        return rows;
    }

    /** Looks up each configured display header in the next state's header tags. */
    private String[] getOverviewDisplayHeaderValues(HollowHistoricalState state, String[] overviewDisplayHeaders) {
        Map<String, String> nextStateHeaders = getNextStateHeaderTags(state);
        String[] values = new String[overviewDisplayHeaders.length];
        for(int i = 0; i < overviewDisplayHeaders.length; i++)
            values[i] = nextStateHeaders.get(overviewDisplayHeaders[i]);
        return values;
    }

    /**
     * Header tags of the state following {@code state}; the latest state's
     * tags when there is no next state.
     */
    private Map<String, String> getNextStateHeaderTags(HollowHistoricalState state) {
        if(state.getNextState() != null)
            return state.getNextState().getHeaderEntries();
        return ui.getHistory().getLatestState().getHeaderTags();
    }

    /** Running totals of modified / added / removed records. */
    public static class ChangeBreakdown {
        private int modifiedRecords;
        private int addedRecords;
        private int removedRecords;

        public ChangeBreakdown() { }

        public ChangeBreakdown(HollowHistoricalStateTypeKeyOrdinalMapping keyMapping) {
            accumulate(keyMapping);
        }

        // Adds one type's counts into the running totals.
        private void accumulate(HollowHistoricalStateTypeKeyOrdinalMapping keyMapping) {
            modifiedRecords += keyMapping.getNumberOfModifiedRecords();
            addedRecords += keyMapping.getNumberOfNewRecords();
            removedRecords += keyMapping.getNumberOfRemovedRecords();
        }

        public int getModifiedRecords() {
            return modifiedRecords;
        }

        public int getAddedRecords() {
            return addedRecords;
        }

        public int getRemovedRecords() {
            return removedRecords;
        }

        public int getTotal() {
            return modifiedRecords + addedRecords + removedRecords;
        }
    }
}
| 9,481 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoricalObjectDiffPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.netflix.hollow.diffview.HollowDiffHtmlKickstarter;
import com.netflix.hollow.diffview.HollowObjectView;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.VersionTimestampConverter;
import com.netflix.hollow.history.ui.model.HistoricalObjectChangeVersion;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.HollowHistory;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Page rendering the diff of a single object within one historical state,
 * along with the list of other versions in which that object changed.
 */
public class HistoricalObjectDiffPage extends HistoryPage {

    public HistoricalObjectDiffPage(HollowHistoryUI ui) {
        super(ui, "history-object.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        long version = Long.parseLong(req.getParameter("version"));
        String type = req.getParameter("type");
        int keyOrdinal = Integer.parseInt(req.getParameter("keyOrdinal"));

        ctx.put("version", version);
        ctx.put("typeName", type);
        ctx.put("keyOrdinal", keyOrdinal);

        HollowObjectView objectView = ui.getViewProvider().getObjectView(req, session);
        HollowDiffHtmlKickstarter htmlKickstarter = new HollowDiffHtmlKickstarter(ui.getBaseURLPath());
        HollowHistory history = ui.getHistory();

        ctx.put("initialHtml", htmlKickstarter.initialHtmlRows(objectView));
        ctx.put("changeVersions", getChangeVersions(type, keyOrdinal, history));
        ctx.put("headerEntries", getHeaderEntries(history.getHistoricalState(version)));
    }

    /**
     * Finds every historical state in which the record identified by
     * {@code keyOrdinal} was added or removed, in state order.
     */
    private List<HistoricalObjectChangeVersion> getChangeVersions(String type, int keyOrdinal, HollowHistory history) {
        List<HistoricalObjectChangeVersion> versions = new ArrayList<HistoricalObjectChangeVersion>();

        for(HollowHistoricalState state : history.getHistoricalStates()) {
            HollowHistoricalStateTypeKeyOrdinalMapping mapping = state.getKeyOrdinalMapping().getTypeMapping(type);
            if (mapping == null)
                continue;   // type absent from this state's mapping

            boolean touched = mapping.findAddedOrdinal(keyOrdinal) != -1
                    || mapping.findRemovedOrdinal(keyOrdinal) != -1;
            if(touched) {
                String timestamp = VersionTimestampConverter.getTimestamp(state.getVersion(), ui.getTimeZone());
                versions.add(new HistoricalObjectChangeVersion(state.getVersion(), timestamp));
            }
        }

        return versions;
    }
}
| 9,482 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryQueryPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.VersionTimestampConverter;
import com.netflix.hollow.history.ui.model.HistoryStateQueryMatches;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.HollowHistory;
import com.netflix.hollow.tools.history.keyindex.HollowHistoryTypeKeyIndex;
import com.netflix.hollow.ui.HollowUISession;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Page showing the results of a free-text history query: for each historical
 * state, the matching record changes, keyed off the history's key index.
 */
public class HistoryQueryPage extends HistoryPage {

    public HistoryQueryPage(HollowHistoryUI ui) {
        super(ui, "history-query.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        HollowHistory history = ui.getHistory();
        String query = req.getParameter("query");
        Map<String, IntList> typeQueryKeyMatches = typeQueryKeyMatches(history, query);

        List<HistoryStateQueryMatches> matchesPerState = new ArrayList<HistoryStateQueryMatches>();
        for(HollowHistoricalState state : history.getHistoricalStates()) {
            String timestamp = VersionTimestampConverter.getTimestamp(state.getVersion(), ui.getTimeZone());
            HistoryStateQueryMatches stateMatches = new HistoryStateQueryMatches(state, ui, timestamp, typeQueryKeyMatches);
            if(stateMatches.hasMatches())
                matchesPerState.add(stateMatches);
        }

        ctx.put("stateQueryMatchesList", matchesPerState);
        ctx.put("query", query);
    }

    /**
     * Runs the query against each type's key index; returns only the types
     * with at least one matching key.
     */
    private Map<String, IntList> typeQueryKeyMatches(HollowHistory history, String query) {
        Map<String, IntList> matchesByType = new HashMap<String, IntList>();

        for(Map.Entry<String, HollowHistoryTypeKeyIndex> indexEntry : history.getKeyIndex().getTypeKeyIndexes().entrySet()) {
            IntList matches = indexEntry.getValue().queryIndexedFields(query);
            if(matches.size() != 0)
                matchesByType.put(indexEntry.getKey(), matches);
        }

        return matchesByType;
    }
}
| 9,483 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.netflix.hollow.diff.ui.model.HollowHeaderEntry;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.ui.HollowUISession;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
/**
 * Base class for all history UI pages. Renders a Velocity template, optionally
 * sandwiched between the shared history header and footer templates.
 */
public abstract class HistoryPage {

    protected final HollowHistoryUI ui;
    protected final Template template;
    protected final Template headerTemplate;
    protected final Template footerTemplate;

    /**
     * @param ui the history UI this page belongs to
     * @param templateName the Velocity template used for the page body
     */
    public HistoryPage(HollowHistoryUI ui, String templateName) {
        this.ui = ui;
        this.template = ui.getVelocityEngine().getTemplate(templateName);
        this.headerTemplate = ui.getVelocityEngine().getTemplate("history-header.vm");
        this.footerTemplate = ui.getVelocityEngine().getTemplate("history-footer.vm");
    }

    /**
     * Renders this page: populates the Velocity context via
     * {@link #setUpContext}, then merges the (optional) header, the page body
     * template, and the (optional) footer into the writer.
     */
    public void render(HttpServletRequest req, HollowUISession session, Writer writer) {
        VelocityContext ctx = new VelocityContext();
        ctx.put("showHomeLink", !(this instanceof HistoryOverviewPage));
        ctx.put("basePath", ui.getBaseURLPath());

        // Let failures propagate directly to the container. The previous
        // catch block printed the stack trace and then rethrew, which
        // reported every error twice.
        setUpContext(req, session, ctx);

        if (includeHeaderAndFooter())
            headerTemplate.merge(ctx, writer);
        template.merge(ctx, writer);
        if (includeHeaderAndFooter())
            footerTemplate.merge(ctx, writer);
    }

    protected abstract void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx);

    /**
     * Builds header entries comparing the given state's header tags against
     * the next state's tags, falling back to the latest state's tags when the
     * given state is the most recent historical state.
     */
    protected List<HollowHeaderEntry> getHeaderEntries(HollowHistoricalState state) {
        Map<String, String> fromTags = state.getHeaderEntries();
        // Only query the latest state when there is no next state; the
        // original computed that fallback eagerly and then discarded it.
        Map<String, String> toTags = (state.getNextState() != null)
                ? state.getNextState().getHeaderEntries()
                : ui.getHistory().getLatestState().getHeaderTags();

        Set<String> allKeys = new HashSet<String>();
        allKeys.addAll(fromTags.keySet());
        allKeys.addAll(toTags.keySet());

        List<HollowHeaderEntry> entries = new ArrayList<HollowHeaderEntry>(allKeys.size());
        int i = 0;
        for (String key : allKeys) {
            entries.add(new HollowHeaderEntry(i++, key, fromTags.get(key), toTags.get(key)));
        }
        return entries;
    }

    /** Pages rendered as embedded fragments override this to return false. */
    protected boolean includeHeaderAndFooter() {
        return true;
    }
}
| 9,484 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryStatePage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.google.gson.Gson;
import com.netflix.hollow.diff.ui.model.HollowHeaderEntry;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.model.HistoryStateTypeChangeSummary;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import com.netflix.hollow.ui.HollowUISession;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.VelocityContext;
/**
 * Page showing a single historical state transition: per-type change
 * summaries, header-tag diffs, and prev/next navigation. Also serves the
 * same summary as JSON via {@link #sendJson}.
 */
public class HistoryStatePage extends HistoryPage {

    public HistoryStatePage(HollowHistoryUI ui) {
        // Classic UI template; use "history-state-enhanced-ui.vm" for the new UI.
        super(ui, "history-state.vm");
    }

    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        HollowHistoricalState historicalState = ui.getHistory().getHistoricalState(Long.parseLong(req.getParameter("version")));

        ctx.put("typeChanges", summarizeTypeChanges(historicalState));
        ctx.put("headerEntries", getHeaderEntries(historicalState));
        ctx.put("currentStateVersion", historicalState.getVersion());
        ctx.put("nextStateVersion", getNextStateVersion(historicalState));
        ctx.put("prevStateVersion", getPreviousStateVersion(historicalState));
    }

    /**
     * Writes the state summary (VIP / dataVersion header params plus per-type
     * change summaries) as a JSON document to the response.
     */
    public void sendJson(HttpServletRequest req, HttpServletResponse resp) {
        HollowHistoricalState historicalState = ui.getHistory().getHistoricalState(Long.parseLong(req.getParameter("version")));

        Map<String, String> params = new HashMap<String, String>();
        for (HollowHeaderEntry headerEntry : getHeaderEntries(historicalState)) {
            String key = headerEntry.getKey();
            if (key.equals("VIP")) {
                params.put("fromVip", headerEntry.getFromValue());
                params.put("toVip", headerEntry.getToValue());
            }
            if (key.equals("dataVersion")) {
                params.put("fromVersion", headerEntry.getFromValue());
                params.put("toVersion", headerEntry.getToValue());
            }
        }

        Map<String, Object> data = new HashMap<String, Object>();
        data.put("params", params);
        data.put("objectTypes", summarizeTypeChanges(historicalState));

        //resp.setContentType("application/json");
        try {
            PrintWriter out = resp.getWriter();
            out.println(new Gson().toJson(data));
        } catch (IOException e) {
            // Best-effort response; the client connection may already be gone.
            e.printStackTrace();
        }
    }

    /**
     * Builds per-type change summaries for the state, omitting types with no
     * changes. Shared by the HTML and JSON renderings (previously duplicated).
     */
    private List<HistoryStateTypeChangeSummary> summarizeTypeChanges(HollowHistoricalState historicalState) {
        List<HistoryStateTypeChangeSummary> typeChanges = new ArrayList<HistoryStateTypeChangeSummary>();
        for (Map.Entry<String, HollowHistoricalStateTypeKeyOrdinalMapping> entry : historicalState.getKeyOrdinalMapping().getTypeMappings().entrySet()) {
            HistoryStateTypeChangeSummary typeChange = new HistoryStateTypeChangeSummary(historicalState.getVersion(), entry.getKey(), entry.getValue());
            if (!typeChange.isEmpty())
                typeChanges.add(typeChange);
        }
        return typeChanges;
    }

    /** Version of the next (more recent) state, or -1 when this is the newest. */
    private long getNextStateVersion(HollowHistoricalState currentHistoricalState) {
        if (currentHistoricalState.getNextState() != null)
            return currentHistoricalState.getNextState().getVersion();
        return -1;
    }

    /**
     * Version of the previous (older) state, found by scanning for the state
     * whose next pointer is the current state; -1 when this is the oldest.
     */
    private long getPreviousStateVersion(HollowHistoricalState currentHistoricalState) {
        for (HollowHistoricalState state : ui.getHistory().getHistoricalStates()) {
            if (state.getNextState() == currentHistoricalState) {
                return state.getVersion();
            }
        }
        return -1;
    }

    /** Rendered inside the surrounding page; no header/footer chrome. */
    @Override
    protected boolean includeHeaderAndFooter() {
        return false;
    }
}
| 9,485 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/history/ui/pages/HistoryStateTypeExpandGroupPage.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.history.ui.pages;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.history.ui.model.HistoryStateTypeChanges;
import com.netflix.hollow.ui.HollowUISession;
import javax.servlet.http.HttpServletRequest;
import org.apache.velocity.VelocityContext;
/**
 * Fragment page that renders the subtree under one expandable group node in
 * the per-type change view for a historical state.
 */
public class HistoryStateTypeExpandGroupPage extends HistoryPage {

    public HistoryStateTypeExpandGroupPage(HollowHistoryUI ui) {
        super(ui, "history-state-type-expand-group.vm");
    }

    /**
     * Looks up the requested expand group in the state's type-change tree and
     * exposes it to the template, echoing back the version/type parameters.
     */
    @Override
    protected void setUpContext(HttpServletRequest req, HollowUISession session, VelocityContext ctx) {
        HistoryStateTypeChanges stateTypeChanges = HistoryStateTypePage.getStateTypeChanges(req, session, ui);
        ctx.put("expandedNode", stateTypeChanges.findTreeNode(req.getParameter("expandGroupId")));
        ctx.put("version", req.getParameter("version"));
        ctx.put("type", req.getParameter("type"));
    }

    /** Rendered as an embedded fragment; no page chrome. */
    @Override
    protected boolean includeHeaderAndFooter() {
        return false;
    }
}
| 9,486 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/DiffViewOutputGenerator.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.HollowEffigy.Field;
import com.netflix.hollow.ui.HollowUISession;
import java.io.IOException;
import java.io.Writer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Produces the pipe-delimited display records consumed by the diff view
 * javascript, and handles the collapse/uncollapse row AJAX endpoints.
 *
 * <p>Each visible row is emitted as 8 "|"-separated fields: row path, action,
 * from-margin index, from-cell CSS class, from-cell content, to-margin index,
 * to-cell CSS class, to-cell content. Leaf values have "|" replaced with "│"
 * so they cannot break the framing (see {@link #getFieldValue}).
 */
public class DiffViewOutputGenerator {

    private final HollowObjectViewProvider viewProvider;

    public DiffViewOutputGenerator(HollowObjectViewProvider viewProvider) {
        this.viewProvider = viewProvider;
    }

    /** Hides all children of the addressed row and acknowledges with "ok". */
    public void collapseRow(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        HollowDiffViewRow row = findRow(req, resp);

        for(HollowDiffViewRow child : row.getChildren())
            child.setVisibility(false);

        resp.getWriter().write("ok");
    }

    /** Shows all children of the addressed row and streams their display data. */
    public void uncollapseRow(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        HollowDiffViewRow row = findRow(req, resp);

        for(HollowDiffViewRow child : row.getChildren())
            child.setVisibility(true);

        buildChildRowDisplayData(row, resp.getWriter());
    }

    // Resolves the row addressed by the dot-separated "row" request parameter
    // by walking child indices down from the view's root row.
    private HollowDiffViewRow findRow(HttpServletRequest req, HttpServletResponse resp) {
        HollowUISession session = HollowUISession.getSession(req, resp);
        HollowObjectView objectView = viewProvider.getObjectView(req, session);

        int rowPath[] = getRowPath(req.getParameter("row"));

        HollowDiffViewRow row = objectView.getRootRow();
        for(int i=0;i<rowPath.length;i++)
            row = row.getChildren().get(rowPath[i]);
        return row;
    }

    // Parses a path string such as "1.4.2" into { 1, 4, 2 }.
    private int[] getRowPath(String rowPathStr) {
        String rowPathElementStrings[] = rowPathStr.split("\\.");
        int rowPath[] = new int[rowPathElementStrings.length];
        for(int i=0;i<rowPathElementStrings.length;i++)
            rowPath[i] = Integer.parseInt(rowPathElementStrings[i]);
        return rowPath;
    }

    /**
     * Writes the display data of every visible descendant of {@code parentRow}
     * as one continuous pipe-delimited record stream.
     */
    public static void buildChildRowDisplayData(HollowDiffViewRow parentRow, Writer writer) throws IOException {
        buildChildRowDisplayData(parentRow, writer, true);
    }

    private static void buildChildRowDisplayData(HollowDiffViewRow parentRow, Writer writer, boolean firstRow) throws IOException {
        for(HollowDiffViewRow row : parentRow.getChildren()) {
            if(row.isVisible()) {
                if(firstRow) {
                    firstRow = false;
                } else {
                    writer.write("|"); // record separator (doubles as the field separator)
                }

                writeRowPathString(row, writer);                          writer.write("|");
                writer.write(row.getAvailableAction().toString());        writer.write("|");
                writer.write(marginIdx(row.getFieldPair().getFromIdx())); writer.write("|");
                writer.write(fromCellClassname(row));                     writer.write("|");
                writer.write(fromContent(row));                           writer.write("|");
                writer.write(marginIdx(row.getFieldPair().getToIdx()));   writer.write("|");
                writer.write(toCellClassname(row));                       writer.write("|");
                writer.write(toContent(row));

                buildChildRowDisplayData(row, writer, false);
            }
        }
    }

    // Writes the row's path as dot-joined child indices, e.g. "1.4.2".
    private static void writeRowPathString(HollowDiffViewRow row, Writer writer) throws IOException {
        for(int i=0;i<row.getRowPath().length;i++) {
            if(i > 0)
                writer.write('.');
            writer.write(String.valueOf(row.getRowPath()[i]));
        }
    }

    // -1 means "no ordinal on this side"; rendered as an empty margin cell.
    private static String marginIdx(int idx) {
        if(idx == -1)
            return "";
        return String.valueOf(idx);
    }

    /**
     * CSS class for the "from" cell: "delete" when the pair only exists on the
     * from side, "empty" when it only exists on the to side, otherwise the
     * shared value-diff classification.
     */
    private static String fromCellClassname(HollowDiffViewRow currentRow) {
        if(currentRow.getFieldPair().getTo() == null)
            return "delete";
        if(currentRow.getFieldPair().getFrom() == null)
            return "empty";
        return valueDiffClassname(currentRow);
    }

    /**
     * CSS class for the "to" cell: "insert" when the pair only exists on the
     * to side, "empty" when it only exists on the from side, otherwise the
     * shared value-diff classification.
     */
    private static String toCellClassname(HollowDiffViewRow currentRow) {
        if(currentRow.getFieldPair().getFrom() == null)
            return "insert";
        if(currentRow.getFieldPair().getTo() == null)
            return "empty";
        return valueDiffClassname(currentRow);
    }

    // Classification shared by both cells once both sides of the pair exist:
    // "replace" when exactly one side's value is null, or a leaf value differs;
    // "equal" otherwise. (Previously duplicated in both classname methods.)
    private static String valueDiffClassname(HollowDiffViewRow currentRow) {
        Object fromValue = currentRow.getFieldPair().getFrom().getValue();
        Object toValue = currentRow.getFieldPair().getTo().getValue();

        if(fromValue == null && toValue == null)
            return "equal";
        if(fromValue == null || toValue == null)
            return "replace";
        if(currentRow.getFieldPair().isLeafNode() && !fromValue.equals(toValue))
            return "replace";
        return "equal";
    }

    // Renders the "from" cell: tree-drawing prefix plus field name and value,
    // or just the continuation bars when the from side is absent.
    private static String fromContent(HollowDiffViewRow row) {
        boolean moreRows[] = new boolean[row.getIndentation() + 1];
        for(int i=0;i<=row.getIndentation();i++)
            moreRows[i] = row.hasMoreFromRows(i);

        if(row.getFieldPair().getFrom() == null)
            return unpopulatedContent(moreRows);

        String fieldName = row.getFieldPair().getFrom().getFieldName();
        return populatedContent(moreRows, row.getIndentation(),
                row.getFieldPair().isLeafNode(), fieldName, getFieldValue(row, true));
    }

    // Renders the "to" cell; mirror image of fromContent().
    private static String toContent(HollowDiffViewRow row) {
        boolean moreRows[] = new boolean[row.getIndentation() + 1];
        for(int i=0;i<=row.getIndentation();i++)
            moreRows[i] = row.hasMoreToRows(i);

        if(row.getFieldPair().getTo() == null)
            return unpopulatedContent(moreRows);

        String fieldName = row.getFieldPair().getTo().getFieldName();
        return populatedContent(moreRows, row.getIndentation(), row.getFieldPair().isLeafNode(), fieldName,
                getFieldValue(row, false));
    }

    /**
     * Returns a String representation of the provided row's field value. If
     * {@code useFrom} is true, this uses the `from` value from the pair,
     * otherwise the `to` value. Leaf values have "|" replaced with "│" so they
     * cannot corrupt the pipe-delimited output framing.
     */
    private static String getFieldValue(HollowDiffViewRow row, boolean useFrom) {
        Field field = useFrom ? row.getFieldPair().getFrom() : row.getFieldPair().getTo();
        if (row.getFieldPair().isLeafNode()) {
            return field.getValue() == null ? "null"
                    : field.getValue().toString().replace("|", "│");
        } else {
            String suffix = field.getValue() == null ? " [null]" : "";
            return "(" + field.getTypeName() + ")" + suffix;
        }
    }

    // Cell content for a side with no record: vertical continuation bars only.
    private static String unpopulatedContent(boolean moreRows[]) {
        StringBuilder builder = new StringBuilder();
        for(int i=0;i<moreRows.length;i++) {
            if(moreRows[i]) {
                builder.append(" │");
            } else {
                builder.append("  ");
            }
        }
        return builder.toString();
    }

    // Cell content for a populated side: tree-drawing prefix (branch glyphs
    // depend on whether the node is a leaf and whether siblings follow),
    // then "fieldName: value".
    private static String populatedContent(boolean moreRows[], int indentation, boolean leafNode, String fieldName, String value) {
        StringBuilder builder = new StringBuilder();

        for(int i=0;i<indentation;i++) {
            if(moreRows[i]) {
                builder.append(".│");
            } else {
                builder.append("..");
            }
        }

        if(!leafNode) {
            if(moreRows[indentation])
                builder.append(".┝━┯━>");
            else
                builder.append(".┕━┯━>");
        } else {
            if(moreRows[indentation])
                builder.append(".├───>");
            else
                builder.append(".└───>");
        }

        if(fieldName != null)
            builder.append(fieldName).append(": ");

        builder.append(value);

        return builder.toString();
    }
}
| 9,487 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowHistoryViewProvider.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.history.ui.HollowHistoryUI;
import com.netflix.hollow.tools.history.HollowHistoricalState;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import com.netflix.hollow.ui.HollowUISession;
import javax.servlet.http.HttpServletRequest;
/**
 * Builds (and caches per UI session) the {@link HollowHistoryView} for a
 * single record across one historical transition.
 */
public class HollowHistoryViewProvider implements HollowObjectViewProvider {

    private final HollowHistoryUI historyUI;

    public HollowHistoryViewProvider(HollowHistoryUI historyUI) {
        this.historyUI = historyUI;
    }

    @Override
    public HollowHistoryView getObjectView(HttpServletRequest req, HollowUISession session) {
        return getObjectView(session,
                Long.parseLong(req.getParameter("version")),
                req.getParameter("type"),
                Integer.parseInt(req.getParameter("keyOrdinal")));
    }

    /**
     * Returns the session-cached view when it matches the requested
     * version/type/keyOrdinal and the latest state engine's randomized tag;
     * otherwise generates a fresh diff view and caches it.
     */
    private HollowHistoryView getObjectView(HollowUISession session, long version, String type, int keyOrdinal) {
        long latestRandomizedTag = historyUI.getHistory().getLatestState().getCurrentRandomizedTag();

        HollowHistoryView cached = (HollowHistoryView) session.getAttribute("hollow-history-view");
        boolean cacheHit = cached != null
                && cached.getHistoricalVersion() == version
                && cached.getType().equals(type)
                && cached.getKeyOrdinal() == keyOrdinal
                && cached.getLatestStateEngineRandomizedTag() == latestRandomizedTag;
        if (cacheHit)
            return cached;

        HollowHistoricalState historicalState = historyUI.getHistory().getHistoricalState(version);
        HollowHistoricalStateTypeKeyOrdinalMapping typeMapping = historicalState.getKeyOrdinalMapping().getTypeMapping(type);
        int removedOrdinal = typeMapping.findRemovedOrdinal(keyOrdinal);
        int addedOrdinal = typeMapping.findAddedOrdinal(keyOrdinal);

        HollowDiffViewRow rootRow = new HollowObjectDiffViewGenerator(
                historicalState.getDataAccess(), historicalState.getDataAccess(),
                historyUI, type, removedOrdinal, addedOrdinal).getHollowDiffViewRows();

        HollowHistoryView freshView = new HollowHistoryView(version, type, keyOrdinal,
                latestRandomizedTag, rootRow, historyUI.getExactRecordMatcher());
        freshView.resetView();

        session.setAttribute("hollow-history-view", freshView);
        return freshView;
    }
}
| 9,488 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowObjectView.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.HollowEffigy;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyFieldPairer.EffigyFieldPair;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
/**
 * Base class for a hierarchical diff view of a single record pair. Holds the
 * root row of the diff tree and computes the initial row visibility so that
 * paths leading to differences are expanded by default.
 */
public abstract class HollowObjectView {
// If the initial view would show more rows than this, subtrees under the
// topmost diff rows are collapsed again.
private static final int MAX_INITIAL_VISIBLE_ROWS_BEFORE_COLLAPSING_DIFFS = 300;
private final HollowDiffViewRow rootRow;
private final ExactRecordMatcher exactRecordMatcher;
// Running count of rows made visible during resetViewForDiff(); also acts as
// the hard stop in makeAllChildrenVisible() once the cap is exceeded.
private int totalVisibilityCount;
public HollowObjectView(HollowDiffViewRow rootRow, ExactRecordMatcher exactRecordMatcher) {
this.rootRow = rootRow;
this.exactRecordMatcher = exactRecordMatcher;
}
public HollowDiffViewRow getRootRow() {
return rootRow;
}
/**
 * Recomputes the initial visibility of every row: rows on a path to a value
 * diff are made visible; if that exceeds the cap, children under the topmost
 * diff rows are collapsed. When no value diffs exist at all, the same
 * procedure is applied to ordering-only differences instead.
 */
public void resetView() {
totalVisibilityCount = 0;
int totalVisibleRows = resetViewForDiff(rootRow, 0);
// The root's direct children are always shown, diff or not.
for(HollowDiffViewRow child : rootRow.getChildren())
child.setVisibility(true);
if(totalVisibleRows > MAX_INITIAL_VISIBLE_ROWS_BEFORE_COLLAPSING_DIFFS) {
collapseChildrenUnderRootDiffRows(rootRow);
} else if(totalVisibleRows == 0) {
// No value diffs: fall back to highlighting ordering changes.
totalVisibleRows = resetViewForOrderingChanges(rootRow, 0);
if(totalVisibleRows > MAX_INITIAL_VISIBLE_ROWS_BEFORE_COLLAPSING_DIFFS) {
collapseChildrenUnderRootOrderingDiffRows(rootRow);
}
}
}
// Marks rows visible along paths that lead to a value diff. Returns the
// number of rows this branch made visible (see the loop note below).
private int resetViewForDiff(HollowDiffViewRow row, int runningVisibilityCount) {
// Exact record matches cannot contain diffs; leave the subtree hidden.
if(rowIsExactMatch(row))
return 0;
int branchVisibilityCount = 0;
if(row.getFieldPair().isDiff()) {
// Diff row: show it and (up to the cap) its entire subtree.
row.setVisibility(true);
totalVisibilityCount++;
branchVisibilityCount++;
branchVisibilityCount += makeAllChildrenVisible(row, branchVisibilityCount + runningVisibilityCount);
} else {
for(HollowDiffViewRow child : row.getChildren()) {
branchVisibilityCount += resetViewForDiff(child, branchVisibilityCount + runningVisibilityCount);
// NOTE(review): once any child branch has become visible, this row is
// re-marked visible and the counters incremented on every remaining
// iteration of this loop, so the returned count over-reports --
// confirm whether that is intentional before restructuring.
if(branchVisibilityCount > 0) {
row.setVisibility(true);
totalVisibilityCount++;
branchVisibilityCount++;
}
}
}
return branchVisibilityCount;
}
// Recursively shows every descendant, bailing out once the global cap has
// been exceeded (remaining subtrees stay hidden and can be expanded lazily).
private int makeAllChildrenVisible(HollowDiffViewRow row, int runningVisibilityCount) {
if(totalVisibilityCount > MAX_INITIAL_VISIBLE_ROWS_BEFORE_COLLAPSING_DIFFS)
return 0;
int branchVisibilityCount = 0;
for(HollowDiffViewRow child : row.getChildren()) {
child.setVisibility(true);
totalVisibilityCount++;
branchVisibilityCount++;
branchVisibilityCount += makeAllChildrenVisible(child, branchVisibilityCount);
}
return branchVisibilityCount;
}
// Walks down to the topmost value-diff rows and hides everything beneath them.
private void collapseChildrenUnderRootDiffRows(HollowDiffViewRow row) {
if(row.areChildrenPopulated()) {
for(HollowDiffViewRow child : row.getChildren()) {
if(child.getFieldPair().isDiff()) {
makeAllChildrenInvisible(child);
} else {
collapseChildrenUnderRootDiffRows(child);
}
}
}
}
// Ordering-diff analogue of resetViewForDiff(). Unlike that method, this one
// does not expand subtrees under a diff row and does not touch
// totalVisibilityCount.
private int resetViewForOrderingChanges(HollowDiffViewRow row, int runningVisibilityCount) {
if(rowIsExactMatch(row))
return 0;
int branchVisibilityCount = 0;
if(row.getFieldPair().isOrderingDiff()) {
row.setVisibility(true);
branchVisibilityCount++;
} else {
for(HollowDiffViewRow child : row.getChildren()) {
int childBranchVisibilityCount = resetViewForOrderingChanges(child, runningVisibilityCount + branchVisibilityCount);
if(childBranchVisibilityCount > 0) {
row.setVisibility(true);
branchVisibilityCount += childBranchVisibilityCount;
}
}
}
return branchVisibilityCount;
}
// Walks down to the topmost ordering-diff rows and hides everything beneath.
private void collapseChildrenUnderRootOrderingDiffRows(HollowDiffViewRow row) {
if(row.areChildrenPopulated()) {
for(HollowDiffViewRow child : row.getChildren()) {
if(child.getFieldPair().isOrderingDiff()) {
makeAllChildrenInvisible(child);
} else {
collapseChildrenUnderRootOrderingDiffRows(child);
}
}
}
}
// Hides the entire populated subtree under row (row itself stays visible).
private void makeAllChildrenInvisible(HollowDiffViewRow row) {
if(row.areChildrenPopulated()) {
for(HollowDiffViewRow child : row.getChildren()) {
child.setVisibility(false);
makeAllChildrenInvisible(child);
}
}
}
// True when both sides are non-leaf effigies that the exact-record matcher
// considers identical. Leaf fields and one-sided pairs never count as exact
// matches.
private boolean rowIsExactMatch(HollowDiffViewRow row) {
EffigyFieldPair fieldPair = row.getFieldPair();
if(fieldPair.getFrom() == null || fieldPair.getTo() == null || fieldPair.isLeafNode())
return false;
HollowEffigy fromEffigy = (HollowEffigy)fieldPair.getFrom().getValue();
HollowEffigy toEffigy = (HollowEffigy)fieldPair.getTo().getValue();
if(fromEffigy == null || toEffigy == null)
return false;
return exactRecordMatcher.isExactMatch(fromEffigy.getDataAccess(), fromEffigy.getOrdinal(), toEffigy.getDataAccess(), toEffigy.getOrdinal());
}
}
| 9,489 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowHistoryView.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
/**
 * A diff view of a single record across one historical transition. The
 * identity fields (historical version, type, key ordinal, and the latest
 * state engine's randomized tag) serve as the session-cache key in
 * {@code HollowHistoryViewProvider}.
 */
public class HollowHistoryView extends HollowObjectView {

    private final long historicalVersion;
    private final String type;
    private final int keyOrdinal;
    private final long latestStateEngineRandomizedTag;

    public HollowHistoryView(long historicalVersion, String type, int keyOrdinal,
            long latestStateEngineRandomizedTag, HollowDiffViewRow rootRow,
            ExactRecordMatcher exactRecordMatcher) {
        super(rootRow, exactRecordMatcher);
        this.historicalVersion = historicalVersion;
        this.type = type;
        this.keyOrdinal = keyOrdinal;
        this.latestStateEngineRandomizedTag = latestStateEngineRandomizedTag;
    }

    public long getHistoricalVersion() {
        return historicalVersion;
    }

    public String getType() {
        return type;
    }

    public int getKeyOrdinal() {
        return keyOrdinal;
    }

    public long getLatestStateEngineRandomizedTag() {
        return latestStateEngineRandomizedTag;
    }
}
| 9,490 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowDiffViewProvider.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diff.ui.HollowDiffUI;
import com.netflix.hollow.ui.HollowUISession;
import javax.servlet.http.HttpServletRequest;
/**
 * Builds (and caches per UI session) the {@link HollowDiffView} comparing one
 * record pair between the diff's from- and to-state engines.
 */
public class HollowDiffViewProvider implements HollowObjectViewProvider {

    private final HollowDiffUI diffUI;

    public HollowDiffViewProvider(HollowDiffUI diffUI) {
        this.diffUI = diffUI;
    }

    @Override
    public HollowDiffView getObjectView(HttpServletRequest req, HollowUISession session) {
        return getObjectView(session,
                req.getParameter("type"),
                Integer.parseInt(req.getParameter("fromOrdinal")),
                Integer.parseInt(req.getParameter("toOrdinal")));
    }

    /**
     * Returns the session-cached view when it matches the requested
     * type/fromOrdinal/toOrdinal; otherwise generates a fresh diff view and
     * caches it on the session.
     */
    private HollowDiffView getObjectView(HollowUISession session, String type, int fromOrdinal, int toOrdinal) {
        HollowDiffView cached = (HollowDiffView) session.getAttribute("hollow-diff-view");
        boolean cacheHit = cached != null
                && cached.getType().equals(type)
                && cached.getToOrdinal() == toOrdinal
                && cached.getFromOrdinal() == fromOrdinal;
        if (cacheHit)
            return cached;

        HollowDiffViewRow rootRow = new HollowObjectDiffViewGenerator(
                diffUI.getDiff().getFromStateEngine(), diffUI.getDiff().getToStateEngine(),
                diffUI, type, fromOrdinal, toOrdinal).getHollowDiffViewRows();

        HollowDiffView freshView = new HollowDiffView(type, fromOrdinal, toOrdinal,
                rootRow, diffUI.getExactRecordMatcher());
        freshView.resetView();

        session.setAttribute("hollow-diff-view", freshView);
        return freshView;
    }
}
| 9,491 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowDiffView.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
/**
 * A diff view of one record pair between two state engines. The identity
 * fields (type, fromOrdinal, toOrdinal) serve as the session-cache key in
 * {@code HollowDiffViewProvider}.
 */
public class HollowDiffView extends HollowObjectView {

    private final String type;
    private final int fromOrdinal;
    private final int toOrdinal;

    public HollowDiffView(String type, int fromOrdinal, int toOrdinal,
            HollowDiffViewRow rootRow, ExactRecordMatcher exactRecordMatcher) {
        super(rootRow, exactRecordMatcher);
        this.type = type;
        this.fromOrdinal = fromOrdinal;
        this.toOrdinal = toOrdinal;
    }

    public String getType() {
        return type;
    }

    public int getFromOrdinal() {
        return fromOrdinal;
    }

    public int getToOrdinal() {
        return toOrdinal;
    }
}
| 9,492 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowDiffHtmlKickstarter.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.HollowDiffViewRow.Action;
import java.io.IOException;
import java.io.StringWriter;
/**
 * Renders the initial HTML table rows for a diff view by parsing the pipe-delimited
 * row display data produced by {@link DiffViewOutputGenerator}.
 */
public class HollowDiffHtmlKickstarter {

    private final String baseURL;

    /**
     * @param baseURL base URL prefix used when referencing image resources
     *                (expand/collapse icons) in the generated HTML
     */
    public HollowDiffHtmlKickstarter(String baseURL) {
        this.baseURL = baseURL;
    }

    /**
     * Builds the initial HTML {@code <tr>} rows for the supplied object view.
     * <p>
     * Each logical row in the serialized display data consists of 8 pipe-delimited tokens:
     * row path, action, then (index value, CSS class, cell content) for the "from" side
     * followed by the same triple for the "to" side.
     *
     * @param objectView the view whose root row supplies the display data
     * @return the concatenated HTML rows
     */
    public String initialHtmlRows(HollowObjectView objectView) {
        String diffViewOutput;

        try {
            StringWriter writer = new StringWriter();
            DiffViewOutputGenerator.buildChildRowDisplayData(objectView.getRootRow(), writer);
            diffViewOutput = writer.toString();
        } catch(IOException unexpected) {
            // StringWriter does not actually throw IOException; wrap to satisfy the signature.
            throw new RuntimeException(unexpected);
        }

        StringBuilder initialHtml = new StringBuilder();
        StringTokenizer tokenizer = new StringTokenizer(diffViewOutput);

        while(tokenizer.hasMoreTokens()) {
            String rowPath = tokenizer.nextToken();
            Action action = Action.valueOf(tokenizer.nextToken());
            String origFromIndexValue = tokenizer.nextToken();
            String fromCellClassname = tokenizer.nextToken();
            String fromCellContent = tokenizer.nextToken();
            String origToIndexValue = tokenizer.nextToken();
            String toCellClassname = tokenizer.nextToken();
            String toCellContent = tokenizer.nextToken();

            initialHtml.append("<tr id=\"r").append(rowPath).append("\"");

            // Rows with children toggle collapse state via an onclick handler.
            if(action == Action.PARTIAL_UNCOLLAPSE || action == Action.UNCOLLAPSE) {
                initialHtml.append(" onclick=\"uncollapseRow('").append(rowPath).append("')\"");
            } else if(action == Action.COLLAPSE) {
                initialHtml.append(" onclick=\"collapseRow('").append(rowPath).append("')\"");
            }

            initialHtml.append(">");

            // The "from" side and the "to" side render identically: icon cell, index cell, content cell.
            appendActionIconCell(initialHtml, action);
            appendIndexCell(initialHtml, origFromIndexValue);
            appendContentCell(initialHtml, fromCellClassname, fromCellContent);

            appendActionIconCell(initialHtml, action);
            appendIndexCell(initialHtml, origToIndexValue);
            appendContentCell(initialHtml, toCellClassname, toCellContent);
        }

        return initialHtml.toString();
    }

    /** Appends the margin cell holding the expand/collapse icon appropriate for the action. */
    private void appendActionIconCell(StringBuilder html, Action action) {
        if(action == Action.PARTIAL_UNCOLLAPSE) {
            html.append("<td class=\"margin\">").append("<img src=\"").append(baseURL).append("/resource/partial_expand.png\"/>").append("</td>");
        } else if(action == Action.UNCOLLAPSE) {
            html.append("<td class=\"margin\">").append("<img src=\"").append(baseURL).append("/resource/expand.png\"/>").append("</td>");
        } else if(action == Action.COLLAPSE) {
            html.append("<td class=\"margin\">").append("<img src=\"").append(baseURL).append("/resource/collapse.png\"/>").append("</td>");
        } else {
            html.append("<td class=\"margin\"/>");
        }
    }

    /** Appends the margin cell holding the element index, or an empty cell if no index applies. */
    private void appendIndexCell(StringBuilder html, String indexValue) {
        if(!"".equals(indexValue))
            html.append("<td class=\"margin\">").append(indexValue).append("</td>");
        else
            html.append("<td class=\"margin\"/>");
    }

    /** Appends the content cell with its CSS class. */
    private void appendContentCell(StringBuilder html, String classname, String content) {
        html.append("<td class=\"").append(classname).append("\">").append(content).append("</td>");
    }

    /**
     * Minimal single-delimiter tokenizer over the '|'-separated display data.
     * Cursor of -1 signals that the final token has been consumed.
     */
    private static class StringTokenizer {
        private final String str;
        private int cursor;

        public StringTokenizer(String str) {
            this.str = str;
        }

        public String nextToken() {
            int endIdx = str.indexOf('|', cursor);
            String token;
            if(endIdx != -1) {
                token = str.substring(cursor, endIdx);
                cursor = endIdx + 1;
            } else {
                token = str.substring(cursor);
                cursor = -1;  // last token consumed
            }
            return token;
        }

        public boolean hasMoreTokens() {
            return str.length() > 0 && cursor >= 0;
        }
    }
}
| 9,493 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowDiffViewRow.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyFieldPairer.EffigyFieldPair;
import java.util.List;
/**
 * A single row in a hierarchical diff view. Each row wraps a paired from/to field
 * ({@link EffigyFieldPair}) and knows its position in the tree via {@code rowPath}
 * (the chain of child indices from the root). Children are created lazily through
 * the owning {@link HollowObjectDiffViewGenerator}.
 */
public class HollowDiffViewRow {

    private final int[] rowPath;
    private final EffigyFieldPair fieldPair;
    private final HollowDiffViewRow parent;
    private final HollowObjectDiffViewGenerator viewGenerator;

    private boolean isVisible;
    private List<HollowDiffViewRow> children;

    // Lazily-built bitsets: bit i set means more from/to rows exist below this row's
    // ancestor at indentation level i. -1 is the "not yet built" sentinel (bit 0 is
    // never set by buildMoreRowsBits, so a legitimately built value can never be -1).
    private long moreFromRowsBits = -1;
    private long moreToRowsBits = -1;

    public HollowDiffViewRow(EffigyFieldPair fieldPair, int[] rowPath, HollowDiffViewRow parent, HollowObjectDiffViewGenerator viewGenerator) {
        this.fieldPair = fieldPair;
        this.rowPath = rowPath;
        this.parent = parent;
        this.viewGenerator = viewGenerator;
        this.isVisible = false;
    }

    /** @return whether the children of this row have already been materialized */
    public boolean areChildrenPopulated() {
        return children != null;
    }

    public EffigyFieldPair getFieldPair() {
        return fieldPair;
    }

    /** @return the chain of child indices from the root row down to this row */
    public int[] getRowPath() {
        return rowPath;
    }

    public HollowDiffViewRow getParent() {
        return parent;
    }

    /** @return the indentation depth of this row (equal to the row path length) */
    public int getIndentation() {
        return rowPath.length;
    }

    public void setVisibility(boolean isVisible) {
        this.isVisible = isVisible;
    }

    public boolean isVisible() {
        return isVisible;
    }

    /**
     * Determines which collapse/uncollapse action is available based on the
     * visibility of this row's children: all visible → COLLAPSE, none visible →
     * UNCOLLAPSE, mixed → PARTIAL_UNCOLLAPSE, no children → NONE.
     */
    public Action getAvailableAction() {
        if(getChildren().isEmpty())
            return Action.NONE;

        boolean foundVisibleChild = false;
        boolean foundInvisibleChild = false;

        for(HollowDiffViewRow child : children) {
            if(child.isVisible()) {
                if(foundInvisibleChild)
                    return Action.PARTIAL_UNCOLLAPSE;
                foundVisibleChild = true;
            } else {
                if(foundVisibleChild)
                    return Action.PARTIAL_UNCOLLAPSE;
                foundInvisibleChild = true;
            }
        }

        return foundVisibleChild ? Action.COLLAPSE : Action.UNCOLLAPSE;
    }

    /** Lazily materializes and returns this row's children. */
    public List<HollowDiffViewRow> getChildren() {
        if(children == null) {
            children = viewGenerator.traverseEffigyToCreateViewRows(this);
        }
        return children;
    }

    public boolean hasMoreFromRows(int indentation) {
        if(moreFromRowsBits == -1)
            buildMoreRowsBits();
        // 1L: a plain int shift would wrap for indentation >= 31 on this long bitset
        return (moreFromRowsBits & (1L << indentation)) != 0;
    }

    public boolean hasMoreToRows(int indentation) {
        if(moreToRowsBits == -1)
            buildMoreRowsBits();
        return (moreToRowsBits & (1L << indentation)) != 0;
    }

    /** Walks up the ancestor chain recording, per level, whether later sibling rows exist. */
    private void buildMoreRowsBits() {
        HollowDiffViewRow ancestor = this.parent;
        moreFromRowsBits = 0;
        moreToRowsBits = 0;

        for(int i=rowPath.length;i>=1;i--) {
            if(moreRows(ancestor, rowPath[i-1], true))
                moreFromRowsBits |= 1L << i;  // long shift, see hasMoreFromRows
            if(moreRows(ancestor, rowPath[i-1], false))
                moreToRowsBits |= 1L << i;
            ancestor = ancestor.getParent();
        }
    }

    /** @return true if any sibling after childIdx has a populated from (or to) side */
    private boolean moreRows(HollowDiffViewRow parent, int childIdx, boolean from) {
        for(int i=childIdx+1;i<parent.getChildren().size();i++) {
            EffigyFieldPair fieldPair = parent.getChildren().get(i).getFieldPair();
            if((from && fieldPair.getFrom() != null) || (!from && fieldPair.getTo() != null))
                return true;
        }
        return false;
    }

    /** The collapse/expand action available for a row in the UI. */
    public enum Action {
        COLLAPSE,
        UNCOLLAPSE,
        PARTIAL_UNCOLLAPSE,
        NONE
    }
}
| 9,494 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowObjectViewProvider.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.ui.HollowUISession;
import javax.servlet.http.HttpServletRequest;
/**
 * Supplies the {@link HollowObjectView} to render for an incoming HTTP request
 * and its associated UI session.
 */
public interface HollowObjectViewProvider {
    /**
     * @param req the current HTTP request
     * @param session the hollow UI session associated with the request
     * @return the object view to render
     */
    public HollowObjectView getObjectView(HttpServletRequest req, HollowUISession session);
}
| 9,495 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/HollowObjectDiffViewGenerator.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.diffview.effigy.CustomHollowEffigyFactory;
import com.netflix.hollow.diffview.effigy.HollowEffigy;
import com.netflix.hollow.diffview.effigy.HollowEffigy.Field;
import com.netflix.hollow.diffview.effigy.HollowEffigyFactory;
import com.netflix.hollow.diffview.effigy.HollowRecordDiffUI;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyFieldPairer;
import com.netflix.hollow.diffview.effigy.pairer.HollowEffigyFieldPairer.EffigyFieldPair;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class HollowObjectDiffViewGenerator {
private final HollowDataAccess fromDataAccess;
private final HollowDataAccess toDataAccess;
private final HollowRecordDiffUI diffUI;
private final String typeName;
private final int fromOrdinal;
private final int toOrdinal;
public HollowObjectDiffViewGenerator(HollowDataAccess fromDataAccess, HollowDataAccess toDataAccess, HollowRecordDiffUI diffUI, String typeName, int fromOrdinal, int toOrdinal) {
this.fromDataAccess = fromDataAccess;
this.toDataAccess = toDataAccess;
this.diffUI = diffUI;
this.typeName = typeName;
this.fromOrdinal = fromOrdinal;
this.toOrdinal = toOrdinal;
}
public HollowDiffViewRow getHollowDiffViewRows() {
HollowEffigy fromEffigy, toEffigy;
if(diffUI != null && diffUI.getCustomHollowEffigyFactory(typeName) != null) {
CustomHollowEffigyFactory effigyFactory = diffUI.getCustomHollowEffigyFactory(typeName);
synchronized(effigyFactory) {
effigyFactory.setFromHollowRecord(fromDataAccess.getTypeDataAccess(typeName), fromOrdinal);
effigyFactory.setToHollowRecord(toDataAccess.getTypeDataAccess(typeName), toOrdinal);
effigyFactory.generateEffigies();
fromEffigy = effigyFactory.getFromEffigy();
toEffigy = effigyFactory.getToEffigy();
}
} else {
HollowEffigyFactory effigyFactory = new HollowEffigyFactory();
fromEffigy = fromOrdinal == -1 ? null : effigyFactory.effigy(fromDataAccess, typeName, fromOrdinal);
toEffigy = toOrdinal == -1 ? null : effigyFactory.effigy(toDataAccess, typeName, toOrdinal);
}
HollowDiffViewRow rootRow = createRootRow(fromEffigy, toEffigy);
traverseEffigyToCreateViewRows(rootRow);
return rootRow;
}
List<HollowDiffViewRow> traverseEffigyToCreateViewRows(HollowDiffViewRow parent) {
if(parent.getFieldPair().isLeafNode())
return Collections.emptyList();
Field fromField = parent.getFieldPair().getFrom();
Field toField = parent.getFieldPair().getTo();
HollowEffigy from = fromField == null ? null : (HollowEffigy) fromField.getValue();
HollowEffigy to = toField == null ? null : (HollowEffigy) toField.getValue();
List<EffigyFieldPair> pairs = HollowEffigyFieldPairer.pair(from, to, diffUI.getMatchHints());
List<HollowDiffViewRow> childRows = new ArrayList<HollowDiffViewRow>();
for(int i=0;i<pairs.size();i++) {
EffigyFieldPair pair = pairs.get(i);
int indentation = parent.getRowPath().length + 1;
int rowPath[] = Arrays.copyOf(parent.getRowPath(), indentation);
rowPath[rowPath.length - 1] = i;
childRows.add(new HollowDiffViewRow(pair, rowPath, parent, this));
}
return childRows;
}
private HollowDiffViewRow createRootRow(HollowEffigy fromEffigy, HollowEffigy toEffigy) {
HollowEffigy.Field fromField = fromEffigy == null ? null : new HollowEffigy.Field(null, fromEffigy);
HollowEffigy.Field toField = toEffigy == null ? null : new HollowEffigy.Field(null, toEffigy);
EffigyFieldPair fieldPair = new EffigyFieldPair(fromField, toField, -1, -1);
return new HollowDiffViewRow(fieldPair, new int[0], null, this);
}
}
| 9,496 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/effigy/HollowEffigyFactory.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview.effigy;
import static com.netflix.hollow.core.read.iterator.HollowOrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.hollow.core.read.dataaccess.HollowCollectionTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.diffview.effigy.HollowEffigy.Field;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Creates {@link HollowEffigy} display representations of hollow records (objects,
 * collections, and maps). Leaf fields are memoized to reduce memory usage when the
 * same (name, type, value) triple appears in many records.
 * <p>
 * NOTE(review): the field memoizer is a plain HashMap, so a single factory instance
 * does not appear to be safe for concurrent use — confirm callers use one per thread.
 */
public class HollowEffigyFactory {

    private final Base64.Encoder base64 = Base64.getEncoder();
    private final Map<HollowEffigy.Field, HollowEffigy.Field> fieldMemoizer = new HashMap<HollowEffigy.Field, HollowEffigy.Field>();

    /**
     * Creates an effigy for the record of the given type at the given ordinal.
     *
     * @return the effigy, or null if the ordinal is -1 or the type is unknown
     * @throws IllegalArgumentException if the type's data access is of an unrecognized kind
     */
    public HollowEffigy effigy(HollowDataAccess dataAccess, String typeName, int ordinal) {
        if(ordinal == -1)
            return null;

        HollowTypeDataAccess typeState = dataAccess.getTypeDataAccess(typeName, ordinal);

        if(typeState == null)
            return null;

        if(typeState instanceof HollowObjectTypeDataAccess) {
            return new HollowEffigy(this, (HollowObjectTypeDataAccess) typeState, ordinal);
        } else if(typeState instanceof HollowCollectionTypeDataAccess) {
            return new HollowEffigy(this, (HollowCollectionTypeDataAccess) typeState, ordinal);
        } else if(typeState instanceof HollowMapTypeDataAccess){
            return new HollowEffigy(this, (HollowMapTypeDataAccess) typeState, ordinal);
        }

        throw new IllegalArgumentException("I don't know how to effigize a " + typeState.getClass());
    }

    /** Dispatches field creation based on the schema type of the effigy's record. */
    List<Field> createFields(HollowEffigy effigy) {
        switch(effigy.dataAccess.getSchema().getSchemaType()) {
        case OBJECT:
            return createObjectFields(effigy);
        case LIST:
        case SET:
            return createCollectionFields(effigy);
        case MAP:
            return createMapFields(effigy);
        }

        throw new IllegalArgumentException();
    }

    /**
     * Creates one display field per schema field of an OBJECT record. Non-reference
     * fields are memoized; LONG fields of a type named "Date" are rendered as
     * formatted timestamps.
     */
    List<Field> createObjectFields(HollowEffigy effigy) {
        List<Field> fields = new ArrayList<Field>();

        HollowObjectTypeDataAccess typeDataAccess = (HollowObjectTypeDataAccess)effigy.dataAccess;
        HollowObjectSchema schema = typeDataAccess.getSchema();

        for(int i=0;i<schema.numFields();i++) {
            String fieldName = schema.getFieldName(i);
            String fieldType = schema.getFieldType(i) == FieldType.REFERENCE ? schema.getReferencedType(i) : schema.getFieldType(i).toString();
            Object fieldValue = null;

            switch(schema.getFieldType(i)) {
            case BOOLEAN:
                fieldValue = typeDataAccess.readBoolean(effigy.ordinal, i);
                break;
            case BYTES:
                byte[] fieldValueBytes = typeDataAccess.readBytes(effigy.ordinal, i);
                if (fieldValueBytes == null || fieldValueBytes.length == 0) {
                    fieldValue = fieldValueBytes;
                } else {
                    fieldValue = base64.encodeToString(fieldValueBytes);
                }
                break;
            case DOUBLE:
                fieldValue = Double.valueOf(typeDataAccess.readDouble(effigy.ordinal, i));
                break;
            case FLOAT:
                fieldValue = Float.valueOf(typeDataAccess.readFloat(effigy.ordinal, i));
                break;
            case INT:
                fieldValue = Integer.valueOf(typeDataAccess.readInt(effigy.ordinal, i));
                break;
            case LONG:
                long longVal = typeDataAccess.readLong(effigy.ordinal, i);
                if(longVal != Long.MIN_VALUE && "Date".equals(typeDataAccess.getSchema().getName())) {
                    // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
                    SimpleDateFormat formatter = new SimpleDateFormat("EEE MMM dd HH:mm:ss.SSS z yyyy");
                    fieldValue = formatter.format(new Date(longVal));
                } else {
                    // Reuse the value already read above instead of issuing a second read.
                    fieldValue = Long.valueOf(longVal);
                }
                break;
            case STRING:
                fieldValue = typeDataAccess.readString(effigy.ordinal, i);
                break;
            case REFERENCE:
                fieldValue = effigy(typeDataAccess.getDataAccess(), schema.getReferencedType(i), typeDataAccess.readOrdinal(effigy.ordinal, i));
            }

            Field field = new Field(fieldName, fieldType, fieldValue);
            if(schema.getFieldType(i) != FieldType.REFERENCE)
                field = memoize(field);

            fields.add(field);
        }

        return fields;
    }

    /** Creates one "element" field per element of a LIST or SET record. */
    private List<Field> createCollectionFields(HollowEffigy effigy) {
        List<Field> fields = new ArrayList<Field>();

        HollowCollectionTypeDataAccess typeDataAccess = (HollowCollectionTypeDataAccess) effigy.dataAccess;
        HollowCollectionSchema schema = typeDataAccess.getSchema();

        HollowOrdinalIterator iter = typeDataAccess.ordinalIterator(effigy.ordinal);

        int elementOrdinal = iter.next();

        while(elementOrdinal != NO_MORE_ORDINALS) {
            HollowEffigy elementEffigy = effigy(typeDataAccess.getDataAccess(), schema.getElementType(), elementOrdinal);
            fields.add(new Field("element", elementEffigy));

            elementOrdinal = iter.next();
        }

        return fields;
    }

    /** Creates one "entry" field (holding key and value sub-effigies) per entry of a MAP record. */
    private List<Field> createMapFields(HollowEffigy effigy) {
        List<Field> fields = new ArrayList<Field>();

        HollowMapTypeDataAccess typeDataAccess = (HollowMapTypeDataAccess)effigy.dataAccess;
        HollowMapSchema schema = typeDataAccess.getSchema();

        HollowMapEntryOrdinalIterator iter = typeDataAccess.ordinalIterator(effigy.ordinal);

        while(iter.next()) {
            HollowEffigy entryEffigy = new HollowEffigy("Map.Entry");
            entryEffigy.add(new Field("key", effigy(typeDataAccess.getDataAccess(), schema.getKeyType(), iter.getKey())));
            entryEffigy.add(new Field("value", effigy(typeDataAccess.getDataAccess(), schema.getValueType(), iter.getValue())));
            fields.add(new Field("entry", "Map.Entry", entryEffigy));
        }

        return fields;
    }

    /** Returns the canonical instance for an equal field previously seen, caching it otherwise. */
    private HollowEffigy.Field memoize(HollowEffigy.Field field) {
        Field canonical = fieldMemoizer.get(field);
        if(canonical == null) {
            fieldMemoizer.put(field, field);
            canonical = field;
        }
        return canonical;
    }
}
| 9,497 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/effigy/CustomHollowEffigyFactory.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview.effigy;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
/**
 * A factory which produces custom {@link HollowEffigy} representations for a specific type,
 * for use when the default effigy generation is unsuitable for display in the diff UI.
 * <p>
 * The protocol is stateful: set both records, call {@link #generateEffigies()}, then read
 * the results. Callers serialize this sequence (see HollowObjectDiffViewGenerator, which
 * synchronizes on the factory instance), so implementations may hold state between calls.
 */
public interface CustomHollowEffigyFactory {
    /**
     * Set the from record, called before generateEffigies
     *
     * @param fromState the type data access for the "from" state
     * @param ordinal the ordinal of the "from" record
     */
    public void setFromHollowRecord(HollowTypeDataAccess fromState, int ordinal);
    /**
     * Set the to record, called before generateEffigies
     *
     * @param toState the type data access for the "to" state
     * @param ordinal the ordinal of the "to" record
     */
    public void setToHollowRecord(HollowTypeDataAccess toState, int ordinal);
    /**
     * Generate the effigies, called before getFromEffigy and getToEffigy
     */
    public void generateEffigies();
    /** @return the effigy generated for the "from" record */
    public HollowEffigy getFromEffigy();
    /** @return the effigy generated for the "to" record */
    public HollowEffigy getToEffigy();
}
| 9,498 |
0 |
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview
|
Create_ds/hollow/hollow-diff-ui/src/main/java/com/netflix/hollow/diffview/effigy/HollowRecordDiffUI.java
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.diffview.effigy;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.diffview.effigy.pairer.exact.ExactRecordMatcher;
import java.util.Map;
/**
 * UI-level customization hooks consulted while rendering record diffs.
 */
public interface HollowRecordDiffUI {
    /**
     * @return per-type primary keys used as hints when pairing from/to effigy fields
     *         (passed to HollowEffigyFieldPairer.pair)
     */
    public Map<String, PrimaryKey> getMatchHints();
    /**
     * @param typeName the record type
     * @return a custom effigy factory for the type, or null to use the default generation
     */
    public CustomHollowEffigyFactory getCustomHollowEffigyFactory(String typeName);
    /** @return the matcher used to identify exactly-matching records in the view */
    public ExactRecordMatcher getExactRecordMatcher();
}
| 9,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.